From 2dc8105c20095968c65244b2fa6175baa28c521e Mon Sep 17 00:00:00 2001 From: Christian Klutz Date: Fri, 10 Mar 2023 17:21:46 +0100 Subject: [PATCH 01/11] Provide array sampling --- .../ObjectSizeTests.cs | 175 ++++++++++++-- src/ManagedObjectSize/ObjectSize.cs | 227 +++++++++++++----- src/ManagedObjectSize/ObjectSizeOptions.cs | 125 +++++++++- src/SampleApp/Program.cs | 81 ++++++- 4 files changed, 517 insertions(+), 91 deletions(-) diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs index 67d3c83..6cca98b 100644 --- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs +++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs @@ -18,7 +18,8 @@ public void ObjectSize_AbortsIfCancellationIsRequested() Assert.ThrowsException(() => { - ObjectSize.GetObjectInclusiveSize("", ObjectSizeOptions.Default, out _, null, cts.Token); + var options = new ObjectSizeOptions { CancellationToken = cts.Token }; + ObjectSize.GetObjectInclusiveSize("", options); }); } } @@ -26,15 +27,14 @@ public void ObjectSize_AbortsIfCancellationIsRequested() [TestMethod] public void ObjectSize_UsesTimeoutIfConfigured() { - // Shortest possible timeout is 1 tick. - // For any non-null object graph that should be small enough to actually trigger the - // timeout - hopefully. If we see spurious test failures here, we might need to re- - // check or provide some sort of mock support for the timeout calculation inside. - var timeout = TimeSpan.FromTicks(1); - Assert.ThrowsException(() => { - ObjectSize.GetObjectInclusiveSize(new ExampleHolder(), ObjectSizeOptions.Default, out _, timeout); + // Shortest possible timeout is 1 tick. + // For any non-null object graph that should be small enough to actually trigger the + // timeout - hopefully. If we see spurious test failures here, we might need to re- + // check or provide some sort of mock support for the timeout calculation inside. + var options = new ObjectSizeOptions { Timeout = TimeSpan.FromTicks(1) }; + ObjectSize.GetObjectInclusiveSize(new ExampleHolder(), options); }); } @@ -44,6 +44,140 @@ public void ObjectSize_Null_ReturnsZero() Assert.AreEqual(0, ObjectSize.GetObjectInclusiveSize(null)); } + [TestMethod] + public void ObjectSize_IsStable() + { + long size = ObjectSize.GetObjectInclusiveSize(CreateData()); + + for (int i = 0; i < 10; i++) + { + Assert.AreEqual(size, ObjectSize.GetObjectInclusiveSize(CreateData())); + } + + static object CreateData() => Enumerable.Repeat("all of same size", 100).ToList(); + } + + [DataTestMethod] + [DataRow(2)] + [DataRow(5)] + [DataRow(10)] + [DataRow(100)] + [DataRow(101)] + public void ObjectSize_ArrayReferences_Sampled(int sampleCount) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData()); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. 
+ Assert.AreEqual(directSize, sampledSize); + + //static object CreateData() => Enumerable.Repeat(, 100).ToList(); + static object CreateData() + { + var result = new List(); + for (int i = 0; i < 100; i++) + { + result.Add(new ExampleType()); + } + return result; + } + } + + [DataTestMethod] + [DataRow(true, 2)] + [DataRow(true, 5)] + [DataRow(true, 10)] + [DataRow(true, 100)] + [DataRow(true, 101)] + [DataRow(false, 2)] + [DataRow(false, 5)] + [DataRow(false, 10)] + [DataRow(false, 100)] + [DataRow(false, 101)] + public void ObjectSize_ArrayReferenceWithStringMember_Sampled(bool equalStrings, int sampleCount) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + + static object CreateData(bool equal) + { + var result = new List(); + for (int i = 0; i < 100; i++) + { + var obj = new ExampleHolder(); + obj.StringValue = equal ? "ccccc" : Guid.NewGuid().ToString(); + result.Add(obj); + } + return result; + } + } + + [DataTestMethod] + [DataRow(true, 2)] + [DataRow(true, 5)] + [DataRow(true, 10)] + [DataRow(true, 100)] + [DataRow(true, 101)] + [DataRow(false, 2)] + [DataRow(false, 5)] + [DataRow(false, 10)] + [DataRow(false, 100)] + [DataRow(false, 101)] + public void ObjectSize_ArrayStrings_Sampled(bool equalStrings, int sampleCount) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + + static object CreateData(bool equal) + { + var result = new List(); + for (int i = 0; i < 100; i++) + { + result.Add(equal ? "ccccc" : Guid.NewGuid().ToString()); + } + return result; + } + } + + [DataTestMethod] + [DataRow(2)] + [DataRow(5)] + [DataRow(10)] + [DataRow(100)] + [DataRow(101)] + public void ObjectSize_ArrayValueTypes_Sampled(int sampleCount) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData()); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + + static object CreateData() => Enumerable.Repeat(42, 100).ToList(); + } + // We could also use [DynamicData] to conduct the test of different objects/types, which would // result in possibly better diagnostics for failed tests, continue running if one test fails, // and report the "true" number of tests, not just 2 as it is now. @@ -53,9 +187,9 @@ public void ObjectSize_Null_ReturnsZero() // spawning createdump.exe, reloading the temp, etc.). 
[DataTestMethod] - [DataRow(ObjectSizeOptions.Default)] - [DataRow(ObjectSizeOptions.UseRtHelpers)] - public unsafe void ObjectSize_ReportsCorrectSize(ObjectSizeOptions options) + [DataRow(false)] + [DataRow(true)] + public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) { var data = new Dictionary(); @@ -78,7 +212,8 @@ public unsafe void ObjectSize_ReportsCorrectSize(ObjectSizeOptions options) var valueArray = new int[] { 1, 2, 3 }; var valueRefArray = new[] { new ValueTypeWithRef("1"), new ValueTypeWithRef("1") }; - var refArray = new[] { new TypeWithRef("1"), new TypeWithRef("2") }; + var refArray = new[] { new ExampleType(), new ExampleType() }; + var refWithStringArray = new[] { new TypeWithStringRef("1"), new TypeWithStringRef("2") }; var pointerArray = new void*[] { (void*)ObjectSize.GetHeapPointer(@string), (void*)ObjectSize.GetHeapPointer(empty) }; var emptyValueArray = new int[] { }; var emptyRefArray = new Empty[] { }; @@ -89,7 +224,10 @@ public unsafe void ObjectSize_ReportsCorrectSize(ObjectSizeOptions options) string internedString2 = String.Intern("INTERNED"); var internedStrings = new string[] { internedString1, internedString2 }; - // options |= ObjectSizeOptions.DebugOutput; + var options = new ObjectSizeOptions(); + options.UseRtHelpers = useRtHelpers; + options.DebugOutput = true; + GetSize(options, empty, data); GetSize(options, valueEmpty, data); GetSize(options, @string, data); @@ -106,6 +244,7 @@ public unsafe void ObjectSize_ReportsCorrectSize(ObjectSizeOptions options) GetSize(options, valueArray, data); GetSize(options, valueRefArray, data); GetSize(options, refArray, data); + GetSize(options, refWithStringArray, data); GetSize(options, pointerArray, data); GetSize(options, emptyValueArray, data); GetSize(options, emptyValueRefArray, data); @@ -131,7 +270,7 @@ public unsafe void ObjectSize_ReportsCorrectSize(ObjectSizeOptions options) Assert.AreEqual(data[address].Type.FullName, clrObj.Type?.ToString(), currentName + " Type"); // Compare actual sizes - (int count, ulong inclusiveSize, ulong exclusiveSize) = ObjSize(clrObj, (options & ObjectSizeOptions.DebugOutput) != 0); + (int count, ulong inclusiveSize, ulong exclusiveSize) = ObjSize(clrObj, options.DebugOutput); Assert.AreEqual(data[address].Count, count, currentName + " Count"); Assert.AreEqual(data[address].InclusiveSize, (long)inclusiveSize, currentName + " InclusiveSize"); Assert.AreEqual(data[address].ExclusiveSize, (long)exclusiveSize, currentName + " ExclusiveSize"); @@ -170,7 +309,7 @@ private static (int count, ulong size, ulong excSize) ObjSize(ClrObject input, b if (debugOutput) { - Console.WriteLine($"[{count:N0}] {(totalSize - curr.Size):N0} -> {totalSize:N0} ({curr.Size:N0}: {curr.Type})"); + Console.WriteLine($"[CLRMD] [{count:N0}] {(totalSize - curr.Size):N0} -> {totalSize:N0} ({curr.Size:N0}: {curr.Type})"); } foreach (var obj in curr.EnumerateReferences(carefully: false, considerDependantHandles: false)) @@ -184,7 +323,7 @@ private static (int count, ulong size, ulong excSize) ObjSize(ClrObject input, b if (debugOutput) { - Console.WriteLine($"total: {totalSize:N0} ({input.Type})"); + Console.WriteLine($"[CLRMD] total: {totalSize:N0} ({input.Type})"); } return (count, totalSize, input.Size); @@ -255,9 +394,9 @@ public ValueTypeWithRef(string s) public string Value; } - private class TypeWithRef + private class TypeWithStringRef { - public TypeWithRef(string s) + public TypeWithStringRef(string s) { Value = s; } diff --git a/src/ManagedObjectSize/ObjectSize.cs 
b/src/ManagedObjectSize/ObjectSize.cs index af06722..478ee45 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -1,6 +1,10 @@ -using System.Reflection; +using System.Diagnostics; +using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; +using System.Security.Cryptography; +using System.Threading.Tasks.Dataflow; +using System.Xml.Linq; namespace ManagedObjectSize { @@ -24,7 +28,7 @@ public static class ObjectSize /// /// Object to calculate size of. /// Approximate size of managed object. - public static long GetObjectExclusiveSize(object? obj) => GetObjectExclusiveSize(obj, ObjectSizeOptions.Default); + public static long GetObjectExclusiveSize(object? obj) => GetObjectExclusiveSize(obj, null); /// /// Calculates approximate memory size of object itself, not accounting for sizes of referenced objects. @@ -32,9 +36,11 @@ public static class ObjectSize /// Object to calculate size of. /// Options to apply during calculation. /// Approximate size of managed object. - public static long GetObjectExclusiveSize(object? obj, ObjectSizeOptions options) + public static long GetObjectExclusiveSize(object? obj, ObjectSizeOptions? options) { - if ((options & ObjectSizeOptions.UseRtHelpers) != 0) + options = (options ?? new()).GetReadOnly(); + + if (options.UseRtHelpers) { return GetObjectExclusiveSizeRtHelpers(obj); } @@ -47,7 +53,7 @@ public static long GetObjectExclusiveSize(object? obj, ObjectSizeOptions options /// /// Object to calculate size of. /// Approximate size of managed object and its reference graph. - public static long GetObjectInclusiveSize(object? obj) => GetObjectInclusiveSize(obj, ObjectSizeOptions.Default, out _); + public static long GetObjectInclusiveSize(object? obj) => GetObjectInclusiveSize(obj, null, out _); /// /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. @@ -55,7 +61,7 @@ public static long GetObjectExclusiveSize(object? obj, ObjectSizeOptions options /// Object to calculate size of. /// Options to apply during calculation. /// Approximate size of managed object and its reference graph. - public static long GetObjectInclusiveSize(object? obj, ObjectSizeOptions options) => GetObjectInclusiveSize(obj, options, out _); + public static long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? options) => GetObjectInclusiveSize(obj, options, out _); /// /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. @@ -63,37 +69,57 @@ public static long GetObjectExclusiveSize(object? obj, ObjectSizeOptions options /// Object to calculate size of. /// Options to apply during calculation. /// Outputs the number of object references seen during calculation. - /// Time after which the operation is to be aborted; null disables timeout. - /// Cancel the operation. /// Approximate size of managed object and its reference graph. - /// The has been canceled. - /// The has elapsed. - /// An invalid was specified. - public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions options, out long count, - TimeSpan? timeout = null, - CancellationToken cancellationToken = default) + /// The . has been canceled. + /// The has elapsed. + public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? 
options, out long count) { - long stopTime = GetStopTime(timeout); - long totalSize = 0; - count = 0; - if (obj == null) { - return totalSize; + count = 0; + return 0; } + options = (options ?? new()).GetReadOnly(); + var eval = new Stack(); - var considered = new HashSet(); + var state = new EvaluationState + { + Considered = new(), + StopTime = options.GetStopTime(Environment.TickCount64), + Options = options + }; eval.Push(obj); + long totalSize = ProcessEvaluationStack(eval, state, out count); + + if (options.DebugOutput) + { + Console.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); + } + + return totalSize; + } + + private class EvaluationState + { + public long StopTime { get; set; } + public ObjectSizeOptions Options { get; set; } = null!; + public HashSet Considered { get; set; } = null!; + } + + private static unsafe long ProcessEvaluationStack(Stack eval, EvaluationState state, out long count) + { + count = 0; + long totalSize = 0; while (eval.Count > 0) { // Check abort conditions. - cancellationToken.ThrowIfCancellationRequested(); - if (stopTime != -1) + state.Options.CancellationToken.ThrowIfCancellationRequested(); + if (state.StopTime != -1) { - CheckStopTime(stopTime, totalSize, count, timeout); + CheckStopTime(state.StopTime, totalSize, count, state.Options.Timeout); } var currentObject = eval.Pop(); @@ -104,7 +130,7 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions } ulong objAddr = (ulong)GetHeapPointer(currentObject); - if (!considered.Add(objAddr)) + if (!state.Considered.Add(objAddr)) { // Already seen this object. continue; @@ -117,11 +143,20 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions continue; } - long currSize = GetObjectExclusiveSize(currentObject, options); - count++; + long currSize; + if (currentObject is ArraySample arraySample) + { + currSize = arraySample.Size; + count += arraySample.ElementCount; + } + else + { + currSize = GetObjectExclusiveSize(currentObject, state.Options); + count++; + } totalSize += currSize; - if ((options & ObjectSizeOptions.DebugOutput) != 0) + if (state.Options.DebugOutput) { Console.WriteLine($"[{count:N0}] {(totalSize - currSize):N0} -> {totalSize:N0} ({currSize:N0}: {currentObject.GetType()})"); } @@ -135,33 +170,15 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions if (currentType.IsArray) { - HandleArray(eval, considered, currentObject, currentType); + HandleArray(eval, state, currentObject, currentType); } - - AddFields(eval, considered, currentObject, currentType); - } - - if ((options & ObjectSizeOptions.DebugOutput) != 0) - { - Console.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); - } - - return totalSize; - } - - private static long GetStopTime(TimeSpan? timeout) - { - if (timeout != null) - { - if (timeout.Value.TotalMilliseconds < 0 || timeout.Value.TotalMilliseconds > (int.MaxValue - 1)) + else { - throw new ArgumentOutOfRangeException(nameof(timeout), timeout, null); + AddFields(eval, state.Considered, currentObject, currentType); } - - return Environment.TickCount64 + (int)(timeout.Value.TotalMilliseconds + 0.5); } - return -1; + return totalSize; } private static void CheckStopTime(long stopAt, long totalSize, long count, TimeSpan? 
timeout) @@ -174,30 +191,122 @@ private static void CheckStopTime(long stopAt, long totalSize, long count, TimeS } } - private static unsafe void HandleArray(Stack eval, HashSet considered, object obj, Type objType) + private static unsafe void HandleArray(Stack eval, EvaluationState state, object obj, Type objType) { var elementType = objType.GetElementType(); if (elementType != null && !elementType.IsPointer) { - foreach (object element in (System.Collections.IEnumerable)obj) + if (state.Options.ArraySampleCount != null) { - if (element != null) + int sampleCount = state.Options.ArraySampleCount.GetValueOrDefault(); + + // If we have less (actual, see comment about elementCount below) elements than the + // sample size. Use the non sampled approach. + if (!HasMoreElements(obj, sampleCount)) { - if (!elementType.IsValueType) + HandleArrayNonSampled(eval, state, obj, elementType); + return; + } + + int elementCount = 0; + var localEval = new Stack(); + var unused = new HashSet(); + + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (element != null) { - ulong elementAddr = (ulong)GetHeapPointer(element); - if (!considered.Contains(elementAddr)) + // We're only counting the elements that are actually non-null. This might + // be less then the size of the array, when the array contains null elements. + // On the other hand, if we could every element, we also count excess elements. + // For example, the extra (unused) capacity of a List<>. + // Only considering non-null elements is still correct, however, because null + // elements don't contribute to the size. + elementCount++; + + if (elementCount <= sampleCount) { - eval.Push(element); + HandleArrayElement(localEval, unused, elementType, element); } } - else + } + + if (localEval.Count > 0) + { + double sampleSize = ProcessEvaluationStack(localEval, state, out long uniqueAddressCount); + + var sample = new ArraySample { - AddFields(eval, considered, element, elementType); - } + Size = (long)((sampleSize / uniqueAddressCount) * elementCount), + ElementCount = elementCount + }; + + eval.Push(sample); + } + } + else + { + HandleArrayNonSampled(eval, state, obj, elementType); + } + } + } + + private static void AddRange(HashSet first, HashSet second) + { + foreach (ulong s in second) + { + first.Add(s); + } + } + + private class ArraySample + { + public long Size { get; set; } + public int ElementCount { get; set; } + } + + private static bool HasMoreElements(object obj, int max) + { + int count = 0; + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (element != null) + { + count++; + if (count > max) + { + return true; } } } + return false; + } + + private static unsafe void HandleArrayNonSampled(Stack eval, EvaluationState state, object obj, Type elementType) + { + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (element != null) + { + HandleArrayElement(eval, state.Considered, elementType, element); + } + } + } + + private static unsafe void HandleArrayElement(Stack eval, HashSet considered, Type elementType, object element) + { + if (!elementType.IsValueType) + { + ulong elementAddr = (ulong)GetHeapPointer(element); + if (!considered.Contains(elementAddr)) + { + eval.Push(element); + } + } + else + { + AddFields(eval, considered, element, elementType); + } } private static unsafe void AddFields(Stack eval, HashSet considered, object currentObject, Type objType) diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs index 
81261e2..156462b 100644 --- a/src/ManagedObjectSize/ObjectSizeOptions.cs +++ b/src/ManagedObjectSize/ObjectSizeOptions.cs @@ -1,11 +1,126 @@  +using System.Threading; + namespace ManagedObjectSize { - [Flags] - public enum ObjectSizeOptions + public class ObjectSizeOptions { - Default = 0, - DebugOutput = 1 << 3, - UseRtHelpers = 1 << 4 + private bool m_debugOutput; + private bool m_useRtHelpers; + private int? m_arraySampleCount; + private TimeSpan? m_timeout; + private CancellationToken m_cancellationToken; + private TextWriter m_debugWriter = Console.Out; + + public CancellationToken CancellationToken + { + get => m_cancellationToken; + set + { + CheckReadOnly(); + m_cancellationToken = value; + } + } + + public TimeSpan? Timeout + { + get => m_timeout; + set + { + CheckReadOnly(); + + if (value != null) + { + if (value.Value.TotalMilliseconds < 0 || value.Value.TotalMilliseconds > (int.MaxValue - 1)) + { + throw new ArgumentOutOfRangeException(nameof(value), value, null); + } + } + + m_timeout = value; + } + } + + public TextWriter DebugWriter + { + get => m_debugWriter; + set + { + CheckReadOnly(); + m_debugWriter = value ?? Console.Out; + } + } + + public bool DebugOutput + { + get => m_debugOutput; + set + { + CheckReadOnly(); + m_debugOutput = value; + } + } + + public bool UseRtHelpers + { + get => m_useRtHelpers; + set + { + CheckReadOnly(); + m_useRtHelpers = value; + } + } + + public int? ArraySampleCount + { + get => m_arraySampleCount; + set + { + CheckReadOnly(); + if (value != null) + { + if (value.Value < 2) + { + throw new ArgumentOutOfRangeException(nameof(value), value.Value, "Need at least a sample count of two"); + } + } + m_arraySampleCount = value; + } + } + + internal long GetStopTime(long ticksNow) + { + if (Timeout != null) + { + return ticksNow + (int)(Timeout.Value.TotalMilliseconds + 0.5); + } + + return -1; + } + + public bool IsReadOnly { get; private set; } + + internal ObjectSizeOptions GetReadOnly() + { + var result = new ObjectSizeOptions + { + DebugOutput = m_debugOutput, + UseRtHelpers = m_useRtHelpers, + ArraySampleCount = m_arraySampleCount, + Timeout = m_timeout, + CancellationToken = m_cancellationToken, + DebugWriter = m_debugWriter, + IsReadOnly = true + }; + return result; + } + + private void CheckReadOnly() + { + if (IsReadOnly) + { + throw new InvalidOperationException("Cannot change a read only instance"); + } + } } } \ No newline at end of file diff --git a/src/SampleApp/Program.cs b/src/SampleApp/Program.cs index 7e012fd..19d1ec2 100644 --- a/src/SampleApp/Program.cs +++ b/src/SampleApp/Program.cs @@ -7,24 +7,86 @@ internal class Program { static void Main(string[] args) { - var graph = CreateObjectGraph(1_000_000); var sw = Stopwatch.StartNew(); + var graph = CreateObjectGraph(100_000_000, true); + sw.Stop(); + Console.WriteLine("Object created: " + sw.Elapsed); + Console.Out.Flush(); + + sw = Stopwatch.StartNew(); long size = ObjectSize.GetObjectInclusiveSize(graph); sw.Stop(); - Console.WriteLine(size + ": " + sw.Elapsed); + Console.WriteLine("Full: " + size.ToString("N0") + " bytes : " + sw.Elapsed); + + sw = Stopwatch.StartNew(); + size = ObjectSize.GetObjectInclusiveSize(graph, new ObjectSizeOptions { ArraySampleCount = 1000 }); + sw.Stop(); + Console.WriteLine("Sample: " + size.ToString("N0") + " bytes : " + sw.Elapsed); } - private static object CreateObjectGraph(int num) +#if false +Object created: 00:01:27.3333068 +10.377.777.676 bytes : 00:02:09.7285067 + +Object created: 00:00:54.2183866 +10.377.755.170 bytes : 00:01:23.4178055 
+ +Object created: 00:00:50.5841990 +10.278.925.504 bytes : 00:01:13.4623666 + +Object created: 00:02:39.7571474 +Full: 10.377.777.868 bytes : 00:02:20.4062759 +Sample: 800.085.782 bytes : 00:00:02.3662649 + +Object created: 00:02:27.7242993 +Full: 10.600.000.088 bytes : 00:02:29.2508853 +Sample: 800.097.990 bytes : 00:00:01.1667667 + +#endif + + private static GraphObject CreateObjectGraph(int num, bool inner = false) { - var graph = new GraphObject(); - graph.ListField = new List(num); - for (int i = 0; i < num; i++) + var graph = new GraphObject { - graph.ListField.Add(new GraphNodeObject + ListField = new List(num) + }; + + int digits = (int)Math.Log10(num) + 1; + var options = new ParallelOptions { MaxDegreeOfParallelism = inner ? 1 : Environment.ProcessorCount }; + Parallel.For(0, num, options, + () => new List(), + (i, state, local) => + { + var node = new GraphNodeObject { StringField = "Node#" + i.ToString().PadRight(digits) }; + if (!inner) + { + node.ObjectField = CreateObjectGraph(100, true); + } + local.Add(node); + return local; + }, + local => { - StringField = "Node#" + i + lock (graph.ListField) + { + graph.ListField.AddRange(local); + } }); - } + + //Parallel.For(0, num, i => + //{ + // var node = new GraphNodeObject { StringField = "Node#" + i }; + // if (!inner) + // { + // node.ObjectField = CreateObjectGraph(10_000, true); + // } + + // lock (graph.ListField) + // { + // graph.ListField.Add(node); + // } + //}); + return graph; } @@ -39,6 +101,7 @@ private class GraphNodeObject public double DoubleField; public int IntField; public string StringField; + public GraphObject ObjectField; } } } \ No newline at end of file From 7f85b847031eac7c0a178385fe3677cca0363164 Mon Sep 17 00:00:00 2001 From: Christian Klutz Date: Mon, 13 Mar 2023 09:48:29 +0100 Subject: [PATCH 02/11] Add sampling based on confidence level --- .../ObjectSizeOptionsTests.cs | 19 ++ .../ObjectSizeTests.cs | 225 ++++++++++++------ src/ManagedObjectSize/ObjectSize.cs | 120 +++++++--- src/ManagedObjectSize/ObjectSizeOptions.cs | 69 +++++- src/ManagedObjectSize/Utils.cs | 178 ++++++++++++++ src/SampleApp/Program.cs | 3 +- 6 files changed, 498 insertions(+), 116 deletions(-) create mode 100644 src/ManagedObjectSize.Tests/ObjectSizeOptionsTests.cs create mode 100644 src/ManagedObjectSize/Utils.cs diff --git a/src/ManagedObjectSize.Tests/ObjectSizeOptionsTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeOptionsTests.cs new file mode 100644 index 0000000..0bca2a2 --- /dev/null +++ b/src/ManagedObjectSize.Tests/ObjectSizeOptionsTests.cs @@ -0,0 +1,19 @@ +using System; + +namespace ManagedObjectSize.Tests +{ + [TestClass] + public class ObjectSizeOptionsTests + { + [DataTestMethod] + [DataRow(0.95, 5, 100, 80)] + [DataRow(0.99, 5, 100, 87)] + [DataRow(0.95, 5, 100_000_000, 384)] + [DataRow(0.99, 5, 100_000_000, 663)] + public void CalculateSampleCount(double confidenceLevel, int confidenceInterval, int populationSize, int expectedSampleSize) + { + int actualSampleSize = Utils.CalculateSampleCount(confidenceLevel, confidenceInterval, populationSize); + Assert.AreEqual(expectedSampleSize, actualSampleSize); + } + } +} diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs index 6cca98b..f2633ce 100644 --- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs +++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs @@ -57,29 +57,24 @@ public void ObjectSize_IsStable() static object CreateData() => Enumerable.Repeat("all of same size", 100).ToList(); } - [DataTestMethod] - 
[DataRow(2)] - [DataRow(5)] - [DataRow(10)] - [DataRow(100)] - [DataRow(101)] - public void ObjectSize_ArrayReferences_Sampled(int sampleCount) + [TestMethod] + [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayReferences_Sampled(int sampleCount, int count) { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData()); + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(), options); + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); // This *should* be true, because in our test data every element has the same size. // In real live scenarios, where elements may vary in size, this will not be true // most of the time. Assert.AreEqual(directSize, sampledSize); - //static object CreateData() => Enumerable.Repeat(, 100).ToList(); - static object CreateData() + static object CreateData(int count) { var result = new List(); - for (int i = 0; i < 100; i++) + for (int i = 0; i < count; i++) { result.Add(new ExampleType()); } @@ -87,33 +82,75 @@ static object CreateData() } } - [DataTestMethod] - [DataRow(true, 2)] - [DataRow(true, 5)] - [DataRow(true, 10)] - [DataRow(true, 100)] - [DataRow(true, 101)] - [DataRow(false, 2)] - [DataRow(false, 5)] - [DataRow(false, 10)] - [DataRow(false, 100)] - [DataRow(false, 101)] - public void ObjectSize_ArrayReferenceWithStringMember_Sampled(bool equalStrings, int sampleCount) + [TestMethod] + [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayValueTypes_Sampled(int sampleCount, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + + static object CreateData(int count) => Enumerable.Repeat(42, count).ToList(); + } + + [TestMethod] + [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayReferenceWithValueTypeMember_Sampled(int sampleCount, int count) { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings)); + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings), options); + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); // This *should* be true, because in our test data every element has the same size. // In real live scenarios, where elements may vary in size, this will not be true // most of the time. 
Assert.AreEqual(directSize, sampledSize); - static object CreateData(bool equal) + static object CreateData(int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(new ExampleValue()); + } + return result; + } + } + + [TestMethod] + [DynamicData(nameof(GetWithStringSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayReferenceWithStringMember_Sampled(bool equalStrings, int sampleCount, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); + + if (equalStrings) + { + // With equal strings the sampling will overestimate the amount of memory used, since + // it doesn't know that in the (not seen) elements some objects are all the same. + Assert.IsTrue(directSize <= sampledSize); + } + else + { + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + } + + static object CreateData(bool equal, int count) { var result = new List(); - for (int i = 0; i < 100; i++) + for (int i = 0; i < count; i++) { var obj = new ExampleHolder(); obj.StringValue = equal ? "ccccc" : Guid.NewGuid().ToString(); @@ -123,33 +160,33 @@ static object CreateData(bool equal) } } - [DataTestMethod] - [DataRow(true, 2)] - [DataRow(true, 5)] - [DataRow(true, 10)] - [DataRow(true, 100)] - [DataRow(true, 101)] - [DataRow(false, 2)] - [DataRow(false, 5)] - [DataRow(false, 10)] - [DataRow(false, 100)] - [DataRow(false, 101)] - public void ObjectSize_ArrayStrings_Sampled(bool equalStrings, int sampleCount) + [TestMethod] + [DynamicData(nameof(GetWithStringSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayStrings_Sampled(bool equalStrings, int sampleCount, int count) { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings)); + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings), options); + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); - // This *should* be true, because in our test data every element has the same size. - // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); + if (equalStrings) + { + // With equal strings the sampling will overestimate the amount of memory used, since + // it doesn't know that in the (not seen) elements some objects are all the same. + Assert.IsTrue(directSize <= sampledSize); + } + else + { + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + } - static object CreateData(bool equal) + static object CreateData(bool equal, int count) { var result = new List(); - for (int i = 0; i < 100; i++) + for (int i = 0; i < count; i++) { result.Add(equal ? 
"ccccc" : Guid.NewGuid().ToString()); } @@ -157,25 +194,37 @@ static object CreateData(bool equal) } } - [DataTestMethod] - [DataRow(2)] - [DataRow(5)] - [DataRow(10)] - [DataRow(100)] - [DataRow(101)] - public void ObjectSize_ArrayValueTypes_Sampled(int sampleCount) + [TestMethod] + [DynamicData(nameof(GetWithStringSampleConfidences), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayStrings_SampledWithConfidence(bool equalStrings, double confidenceLevel, int count) { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData()); - - var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(), options); + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); + var options = new ObjectSizeOptions { ArraySampleConfidenceLevel = confidenceLevel }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); - // This *should* be true, because in our test data every element has the same size. - // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); + if (equalStrings) + { + // With equal strings the sampling will overestimate the amount of memory used, since + // it doesn't know that in the (not seen) elements some objects are all the same. + Assert.IsTrue(directSize <= sampledSize); + } + else + { + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + } - static object CreateData() => Enumerable.Repeat(42, 100).ToList(); + static object CreateData(bool equal, int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(equal ? 
"ccccc" : Guid.NewGuid().ToString()); + } + return result; + } } // We could also use [DynamicData] to conduct the test of different objects/types, which would @@ -210,10 +259,12 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) selfRef.Ref.Ref = selfRef; var withPointer = new TypeWithPointer { Ptr = (void*)ObjectSize.GetHeapPointer(@string) }; + var stringArray = new string[] { "ccccc", "ccccc", "ccccc", "ccccc", "ccccc", "ccccc" }; var valueArray = new int[] { 1, 2, 3 }; var valueRefArray = new[] { new ValueTypeWithRef("1"), new ValueTypeWithRef("1") }; var refArray = new[] { new ExampleType(), new ExampleType() }; - var refWithStringArray = new[] { new TypeWithStringRef("1"), new TypeWithStringRef("2") }; + var refWithDifferentStringsArray = new[] { new TypeWithStringRef("aaaaa"), new TypeWithStringRef("aaaaa") }; + var refWithSameStringsArray = new[] { new TypeWithStringRef("aaaaa"), new TypeWithStringRef("bbbbb") }; var pointerArray = new void*[] { (void*)ObjectSize.GetHeapPointer(@string), (void*)ObjectSize.GetHeapPointer(empty) }; var emptyValueArray = new int[] { }; var emptyRefArray = new Empty[] { }; @@ -226,7 +277,7 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) var options = new ObjectSizeOptions(); options.UseRtHelpers = useRtHelpers; - options.DebugOutput = true; + //options.DebugOutput = true; GetSize(options, empty, data); GetSize(options, valueEmpty, data); @@ -241,10 +292,12 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) GetSize(options, selfRef, data); GetSize(options, withPointer, data); + GetSize(options, stringArray, data); GetSize(options, valueArray, data); GetSize(options, valueRefArray, data); GetSize(options, refArray, data); - GetSize(options, refWithStringArray, data); + GetSize(options, refWithDifferentStringsArray, data); + GetSize(options, refWithSameStringsArray, data); GetSize(options, pointerArray, data); GetSize(options, emptyValueArray, data); GetSize(options, emptyValueRefArray, data); @@ -329,6 +382,44 @@ private static (int count, ulong size, ulong excSize) ObjSize(ClrObject input, b return (count, totalSize, input.Size); } + private static readonly int[] s_sampleSizesFor100 = new[] { 2, 5, 10, 50, 75, 99, 100, 101 }; + + private static IEnumerable GetWithStringSampleSizes() + { + foreach (var size in s_sampleSizesFor100) + { + yield return new object[] { true, size, 100 }; + } + + foreach (var size in s_sampleSizesFor100) + { + yield return new object[] { false, size, 100 }; + } + } + + private static IEnumerable GetSampleSizes() + { + foreach (var size in s_sampleSizesFor100) + { + yield return new object[] { size, 100 }; + } + } + + private static readonly double[] s_sampleConfidences = new[] { 0.9, 0.95, 0.99 }; + + private static IEnumerable GetWithStringSampleConfidences() + { + foreach (var confidenceLevel in s_sampleConfidences) + { + yield return new object[] { true, confidenceLevel, 10_000 }; + } + + foreach (var confidenceLevel in s_sampleConfidences) + { + yield return new object[] { false, confidenceLevel, 10_000 }; + } + } + private class Empty { } private struct ValueEmpty { } diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 478ee45..841565d 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -2,6 +2,7 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; +using System.Runtime.InteropServices.ObjectiveC; using 
System.Security.Cryptography; using System.Threading.Tasks.Dataflow; using System.Xml.Linq; @@ -95,7 +96,7 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? if (options.DebugOutput) { - Console.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); + state.Options.DebugWriter.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); } return totalSize; @@ -123,6 +124,7 @@ private static unsafe long ProcessEvaluationStack(Stack eval, Evaluation } var currentObject = eval.Pop(); + if (currentObject == null) { // Cannot get the size for a "null" object. @@ -158,7 +160,7 @@ private static unsafe long ProcessEvaluationStack(Stack eval, Evaluation if (state.Options.DebugOutput) { - Console.WriteLine($"[{count:N0}] {(totalSize - currSize):N0} -> {totalSize:N0} ({currSize:N0}: {currentObject.GetType()})"); + state.Options.DebugWriter.WriteLine($"[{count:N0}] {(totalSize - currSize):N0} -> {totalSize:N0} ({currSize:N0}: {currentObject.GetType()})"); } if (currentType == typeof(string)) @@ -196,59 +198,88 @@ private static unsafe void HandleArray(Stack eval, EvaluationState state var elementType = objType.GetElementType(); if (elementType != null && !elementType.IsPointer) { - if (state.Options.ArraySampleCount != null) + (int sampleSize, int? populationSize) = GetSampleAndPopulateSize(state, obj, objType); + + if (sampleSize == 0) { - int sampleCount = state.Options.ArraySampleCount.GetValueOrDefault(); + HandleArrayNonSampled(eval, state, obj, elementType); + return; + } - // If we have less (actual, see comment about elementCount below) elements than the - // sample size. Use the non sampled approach. - if (!HasMoreElements(obj, sampleCount)) - { - HandleArrayNonSampled(eval, state, obj, elementType); - return; - } + // If we have less (actual, see comment about elementCount below) elements than the + // sample size. Use the non sampled approach. + if ((populationSize != null && populationSize <= sampleSize) || + !HasMoreElements(obj, sampleSize)) + { + HandleArrayNonSampled(eval, state, obj, elementType); + return; + } - int elementCount = 0; - var localEval = new Stack(); - var unused = new HashSet(); + int elementCount = 0; + var localEval = new Stack(); + var localConsidered = new HashSet(); - foreach (object element in (System.Collections.IEnumerable)obj) + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (element != null) { - if (element != null) + // We're only counting the elements that are actually non-null. This might + // be less then the size of the array, when the array contains null elements. + // On the other hand, if we could every element, we also count excess elements. + // For example, the extra (unused) capacity of a List<>. + // Only considering non-null elements is still correct, however, because null + // elements don't contribute to the size. + elementCount++; + + if (elementCount <= sampleSize) { - // We're only counting the elements that are actually non-null. This might - // be less then the size of the array, when the array contains null elements. - // On the other hand, if we could every element, we also count excess elements. - // For example, the extra (unused) capacity of a List<>. - // Only considering non-null elements is still correct, however, because null - // elements don't contribute to the size. 
- elementCount++; - - if (elementCount <= sampleCount) + ulong elementAddr = (ulong)GetHeapPointer(element); + if (!localConsidered.Contains(elementAddr)) { - HandleArrayElement(localEval, unused, elementType, element); + HandleArrayElement(localEval, localConsidered, elementType, element); + localConsidered.Add(elementAddr); } } } + } - if (localEval.Count > 0) - { - double sampleSize = ProcessEvaluationStack(localEval, state, out long uniqueAddressCount); + if (localEval.Count > 0) + { + double sizeOfSamples = ProcessEvaluationStack(localEval, state, out _); - var sample = new ArraySample - { - Size = (long)((sampleSize / uniqueAddressCount) * elementCount), - ElementCount = elementCount - }; + var sample = new ArraySample + { + Size = (long)((sizeOfSamples / localConsidered.Count) * elementCount), + ElementCount = elementCount + }; - eval.Push(sample); - } + eval.Push(sample); } - else + } + } + + private static unsafe (int SampleSize, int? PopulationSize) GetSampleAndPopulateSize(EvaluationState state, object obj, Type objType) + { + if (state.Options.ArraySampleCount != null) + { + return (state.Options.ArraySampleCount.Value, null); + } + else if (state.Options.ArraySampleConfidenceLevel != null) + { + // For size calculation we also only consider non-null elements, so here we have to do it as well. + // If we wouldn't, the population size would be too big and the sample size thus too small. + int populationSize = CountNonNullElements(obj); + int sampleSize = Utils.CalculateSampleCount(state.Options.ArraySampleConfidenceLevel.Value, state.Options.ArraySampleConfidenceInterval, populationSize); + + if (state.Options.DebugOutput) { - HandleArrayNonSampled(eval, state, obj, elementType); + state.Options.DebugWriter.WriteLine($"array {GetHeapPointer(obj)}/{objType}: population={populationSize:N0} sampleSize={sampleSize:N0}"); } + + return (sampleSize, populationSize); } + + return (0, null); } private static void AddRange(HashSet first, HashSet second) @@ -265,6 +296,19 @@ private class ArraySample public int ElementCount { get; set; } } + private static int CountNonNullElements(object obj) + { + int count = 0; + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (element != null) + { + count++; + } + } + return count; + } + private static bool HasMoreElements(object obj, int max) { int count = 0; diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs index 156462b..2c58a0c 100644 --- a/src/ManagedObjectSize/ObjectSizeOptions.cs +++ b/src/ManagedObjectSize/ObjectSizeOptions.cs @@ -11,6 +11,8 @@ public class ObjectSizeOptions private TimeSpan? m_timeout; private CancellationToken m_cancellationToken; private TextWriter m_debugWriter = Console.Out; + private double? m_arraySampleConfidenceLevel; + private int m_arraySampleConfidenceInterval = 5; public CancellationToken CancellationToken { @@ -29,12 +31,9 @@ public TimeSpan? Timeout { CheckReadOnly(); - if (value != null) + if (value != null && (value.Value.TotalMilliseconds < 0 || value.Value.TotalMilliseconds > (int.MaxValue - 1))) { - if (value.Value.TotalMilliseconds < 0 || value.Value.TotalMilliseconds > (int.MaxValue - 1)) - { - throw new ArgumentOutOfRangeException(nameof(value), value, null); - } + throw new ArgumentOutOfRangeException(nameof(value), value, null); } m_timeout = value; @@ -71,23 +70,71 @@ public bool UseRtHelpers } } + /// + /// Gets or sets a value that describes how many elements of an array should be checked at a maximum. 
+ /// If the array contains fewer elements than this value, the array is processed as if sampling had
+ /// not been enabled. Also see the remarks section.
+ ///
+ ///
+ /// The number of elements of an array to check at a maximum. The minimum value is 2.
+ /// Also see the remarks section.
+ ///
+ ///
+ /// Sampling will produce estimates that are too high when the elements in the array share a lot of objects.
+ /// For example, if the array elements contain a lot of strings that are all the same instance (address).
+ /// This can be mitigated (a bit) by choosing a sample size that is not too small compared to the
+ /// actual data. However, this quickly questions the usefulness of sampling in the first place. You
+ /// should use sampling only if you can live with numbers that are higher than the actual usage, or
+ /// when you know your data to contain many unique objects.
+ ///
 public int? ArraySampleCount
 {
 get => m_arraySampleCount;
 set
 {
 CheckReadOnly();
- if (value != null)
+
+ if (value != null && value.Value < 2)
 {
- if (value.Value < 2)
- {
- throw new ArgumentOutOfRangeException(nameof(value), value.Value, "Need at least a sample count of two");
- }
+ throw new ArgumentOutOfRangeException(nameof(value), value.Value, "Need at least a sample count of two");
 }
+
 m_arraySampleCount = value;
 }
 }
+ public double? ArraySampleConfidenceLevel
+ {
+ get => m_arraySampleConfidenceLevel;
+ set
+ {
+ CheckReadOnly();
+
+ if (value != null && (value.Value > 100 || value.Value <= 0))
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), value.Value, "Value must be greater than zero and 100 or less");
+ }
+
+ m_arraySampleConfidenceLevel = value;
+ }
+ }
+
+ public int ArraySampleConfidenceInterval
+ {
+ get => m_arraySampleConfidenceInterval;
+ set
+ {
+ CheckReadOnly();
+
+ if (value < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), value, "Value cannot be negative");
+ }
+
+ m_arraySampleConfidenceInterval = value;
+ }
+ }
+
 internal long GetStopTime(long ticksNow)
 {
 if (Timeout != null)
@@ -107,6 +154,8 @@ internal ObjectSizeOptions GetReadOnly()
 DebugOutput = m_debugOutput,
 UseRtHelpers = m_useRtHelpers,
 ArraySampleCount = m_arraySampleCount,
+ ArraySampleConfidenceInterval = m_arraySampleConfidenceInterval,
+ ArraySampleConfidenceLevel = m_arraySampleConfidenceLevel,
 Timeout = m_timeout,
 CancellationToken = m_cancellationToken,
 DebugWriter = m_debugWriter,
diff --git a/src/ManagedObjectSize/Utils.cs b/src/ManagedObjectSize/Utils.cs
new file mode 100644
index 0000000..22c05f6
--- /dev/null
+++ b/src/ManagedObjectSize/Utils.cs
@@ -0,0 +1,178 @@
+using System;
+
+namespace ManagedObjectSize
+{
+ public class Utils
+ {
+ ///
+ /// Calculates the required sample count for a given confidence level, confidence interval, and population size.
+ ///
+ ///
+ ///
+ ///
+ ///
+ internal static int CalculateSampleCount(double confidenceLevel, int confidenceInterval, int populationSize)
+ {
+ if (populationSize <= 0)
+ {
+ return 0;
+ }
+
+ double Z = QNorm((1 - confidenceLevel) / 2, 0.0, 1.0, true, false);
+ double p = 0.5;
+ double c = (double)confidenceInterval / 100;
+ double ss = (Math.Pow(Z, 2) * p * (1 - p)) / Math.Pow(c, 2);
+ double finiteSS = ss / (1 + ((ss - 1) / populationSize));
+
+ return (int)Math.Round(finiteSS);
+ }
+
+ ///
+ /// Quantile function (Inverse CDF) for the normal distribution.
+ ///
+ /// Probability.
+ /// Mean of normal distribution.
+ /// Standard deviation of normal distribution.
+ /// If true, probability is P[X <= x], otherwise P[X > x].
+ /// If true, probabilities are given as log(p). + /// P[X <= x] where x ~ N(mu,sigma^2) + /// See https://svn.r-project.org/R/trunk/src/nmath/qnorm.c + /// See https://stackoverflow.com/a/1674554/21567 + private static double QNorm(double p, double mu, double sigma, bool lower_tail, bool log_p) + { + if (double.IsNaN(p) || double.IsNaN(mu) || double.IsNaN(sigma)) return (p + mu + sigma); + double ans; + bool isBoundaryCase = R_Q_P01_boundaries(p, double.NegativeInfinity, double.PositiveInfinity, lower_tail, log_p, out ans); + if (isBoundaryCase) return (ans); + if (sigma < 0) return (double.NaN); + if (sigma == 0) return (mu); + + double p_ = R_DT_qIv(p, lower_tail, log_p); + double q = p_ - 0.5; + double r, val; + + if (Math.Abs(q) <= 0.425) // 0.075 <= p <= 0.925 + { + r = .180625 - q * q; + val = q * (((((((r * 2509.0809287301226727 + + 33430.575583588128105) * r + 67265.770927008700853) * r + + 45921.953931549871457) * r + 13731.693765509461125) * r + + 1971.5909503065514427) * r + 133.14166789178437745) * r + + 3.387132872796366608) + / (((((((r * 5226.495278852854561 + + 28729.085735721942674) * r + 39307.89580009271061) * r + + 21213.794301586595867) * r + 5394.1960214247511077) * r + + 687.1870074920579083) * r + 42.313330701600911252) * r + 1.0); + } + else + { + r = q > 0 ? R_DT_CIv(p, lower_tail, log_p) : p_; + r = Math.Sqrt(-((log_p && ((lower_tail && q <= 0) || (!lower_tail && q > 0))) ? p : Math.Log(r))); + + if (r <= 5) // <==> min(p,1-p) >= exp(-25) ~= 1.3888e-11 + { + r -= 1.6; + val = (((((((r * 7.7454501427834140764e-4 + + .0227238449892691845833) * r + .24178072517745061177) * + r + 1.27045825245236838258) * r + + 3.64784832476320460504) * r + 5.7694972214606914055) * + r + 4.6303378461565452959) * r + + 1.42343711074968357734) + / (((((((r * + 1.05075007164441684324e-9 + 5.475938084995344946e-4) * + r + .0151986665636164571966) * r + + .14810397642748007459) * r + .68976733498510000455) * + r + 1.6763848301838038494) * r + + 2.05319162663775882187) * r + 1.0); + } + else // very close to 0 or 1 + { + r -= 5.0; + val = (((((((r * 2.01033439929228813265e-7 + + 2.71155556874348757815e-5) * r + + .0012426609473880784386) * r + .026532189526576123093) * + r + .29656057182850489123) * r + + 1.7848265399172913358) * r + 5.4637849111641143699) * + r + 6.6579046435011037772) + / (((((((r * + 2.04426310338993978564e-15 + 1.4215117583164458887e-7) * + r + 1.8463183175100546818e-5) * r + + 7.868691311456132591e-4) * r + .0148753612908506148525) + * r + .13692988092273580531) * r + + .59983220655588793769) * r + 1.0); + } + if (q < 0.0) val = -val; + } + + return (mu + sigma * val); + } + private static bool R_Q_P01_boundaries(double p, double _LEFT_, double _RIGHT_, bool lower_tail, bool log_p, out double ans) + { + if (log_p) + { + if (p > 0.0) + { + ans = double.NaN; + return (true); + } + if (p == 0.0) + { + ans = lower_tail ? _RIGHT_ : _LEFT_; + return (true); + } + if (p == double.NegativeInfinity) + { + ans = lower_tail ? _LEFT_ : _RIGHT_; + return (true); + } + } + else + { + if (p < 0.0 || p > 1.0) + { + ans = double.NaN; + return (true); + } + if (p == 0.0) + { + ans = lower_tail ? _LEFT_ : _RIGHT_; + return (true); + } + if (p == 1.0) + { + ans = lower_tail ? _RIGHT_ : _LEFT_; + return (true); + } + } + ans = double.NaN; + return (false); + } + + private static double R_DT_qIv(double p, bool lower_tail, bool log_p) + { + return (log_p ? (lower_tail ? 
Math.Exp(p) : -ExpM1(p)) : R_D_Lval(p, lower_tail)); + } + + private static double R_DT_CIv(double p, bool lower_tail, bool log_p) + { + return (log_p ? (lower_tail ? -ExpM1(p) : Math.Exp(p)) : R_D_Cval(p, lower_tail)); + } + + private static double R_D_Lval(double p, bool lower_tail) + { + return lower_tail ? p : 0.5 - p + 0.5; + } + + private static double R_D_Cval(double p, bool lower_tail) + { + return lower_tail ? 0.5 - p + 0.5 : p; + } + private static double ExpM1(double x) + { + if (Math.Abs(x) < 1e-5) + return x + 0.5 * x * x; + else + return Math.Exp(x) - 1.0; + } + } +} diff --git a/src/SampleApp/Program.cs b/src/SampleApp/Program.cs index 19d1ec2..8a4e077 100644 --- a/src/SampleApp/Program.cs +++ b/src/SampleApp/Program.cs @@ -57,7 +57,8 @@ private static GraphObject CreateObjectGraph(int num, bool inner = false) () => new List(), (i, state, local) => { - var node = new GraphNodeObject { StringField = "Node#" + i.ToString().PadRight(digits) }; + //var node = new GraphNodeObject { StringField = "Node#" + i.ToString().PadRight(digits) }; + var node = new GraphNodeObject { StringField = "Node#" }; if (!inner) { node.ObjectField = CreateObjectGraph(100, true); From f2b867e00ff3b803c31b98e8ffd8ed07ce461cb5 Mon Sep 17 00:00:00 2001 From: Christian Klutz Date: Mon, 13 Mar 2023 10:37:45 +0100 Subject: [PATCH 03/11] Add sampling based on confidence level --- src/ManagedObjectSize.Tests/ObjectSizeTests.cs | 14 ++++++++++++++ src/ManagedObjectSize/ObjectSize.cs | 5 +++++ src/ManagedObjectSize/ObjectSizeOptions.cs | 14 ++++++++++++++ 3 files changed, 33 insertions(+) diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs index f2633ce..bf8f87e 100644 --- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs +++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs @@ -270,6 +270,18 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) var emptyRefArray = new Empty[] { }; var emptyValueRefArray = new ValueTypeWithRef[] { }; var emptyPointerArray = new void*[] { }; + var jaggedArray = new int[10][]; + for (int i = 0; i < 10; i++) + { + jaggedArray[i] = new[] { 1, 2, 3, 4, 5 }; + } + var multiDimensionalArray = new int[,] + { + { 1, 2, 3, 4, 5 }, + { 1, 2, 3, 4, 5 }, + { 1, 2, 3, 4, 5 }, + { 1, 2, 3, 4, 5 } + }; string internedString1 = String.Intern("INTERNED"); string internedString2 = String.Intern("INTERNED"); @@ -303,6 +315,8 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) GetSize(options, emptyValueRefArray, data); GetSize(options, emptyRefArray, data); GetSize(options, emptyPointerArray, data); + GetSize(options, jaggedArray, data); + GetSize(options, multiDimensionalArray, data); GetSize(options, internedStrings, data); diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 841565d..6243bb4 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -208,6 +208,8 @@ private static unsafe void HandleArray(Stack eval, EvaluationState state // If we have less (actual, see comment about elementCount below) elements than the // sample size. Use the non sampled approach. + // PopulationSize has already been determined, if confidence level based sampling is + // configured. In this case, reuse the value. 
if ((populationSize != null && populationSize <= sampleSize) ||
 !HasMoreElements(obj, sampleSize))
 {
 HandleArrayNonSampled(eval, state, obj, elementType);
 return;
 }
 int elementCount = 0;
+
+ // TODO: Should these be from a pool? Measure if cost is too high allocating if we have
+ // a "large" number of arrays to sample.
 var localEval = new Stack<object>();
 var localConsidered = new HashSet<ulong>();
diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs
index 2c58a0c..661542e 100644
--- a/src/ManagedObjectSize/ObjectSizeOptions.cs
+++ b/src/ManagedObjectSize/ObjectSizeOptions.cs
@@ -103,6 +103,20 @@ public int? ArraySampleCount
 }
 }
+ ///
+ /// Gets or sets a value that determines the sample size (ArraySampleCount) based on a given
+ /// confidence level.
+ /// If the array contains fewer elements than the calculated sample size, the array is processed as if sampling had
+ /// not been enabled. Also see the remarks section.
+ ///
+ ///
+ /// Sampling will produce estimates that are too high when the elements in the array share a lot of objects.
+ /// For example, if the array elements contain a lot of strings that are all the same instance (address).
+ /// This can be mitigated (a bit) by choosing a sample size that is not too small compared to the
+ /// actual data. However, this quickly questions the usefulness of sampling in the first place. You
+ /// should use sampling only if you can live with numbers that are higher than the actual usage, or
+ /// when you know your data to contain many unique objects.
+ ///
 public double? ArraySampleConfidenceLevel
 {
 get => m_arraySampleConfidenceLevel;

From 0b7c2c0a21b920b159e1d921c6ddc9bb23e86c1c Mon Sep 17 00:00:00 2001
From: Christian Klutz
Date: Tue, 14 Mar 2023 06:08:02 +0100
Subject: [PATCH 04/11] Add sampling based on confidence level

---
 .../ObjectSizeTests.cs | 12 +-
 src/ManagedObjectSize/ObjectSize.cs | 221 ++++++++++++------
 src/ManagedObjectSize/ObjectSizeOptions.cs | 92 +++++++-
 src/ManagedObjectSize/Utils.cs | 2 +-
 4 files changed, 256 insertions(+), 71 deletions(-)

diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs
index bf8f87e..cc8d579 100644
--- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs
+++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs
@@ -96,7 +96,15 @@ public void ObjectSize_ArrayValueTypes_Sampled(int sampleCount, int count)
 // most of the time.
Assert.AreEqual(directSize, sampledSize); - static object CreateData(int count) => Enumerable.Repeat(42, count).ToList(); + static object CreateData(int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(i); + } + return result; + } } [TestMethod] @@ -415,7 +423,7 @@ private static IEnumerable GetSampleSizes() { foreach (var size in s_sampleSizesFor100) { - yield return new object[] { size, 100 }; + yield return new object[] { size, 123 }; } } diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 6243bb4..d727f6c 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -1,11 +1,7 @@ -using System.Diagnostics; +//#define FEATURE_STATISTICS using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; -using System.Runtime.InteropServices.ObjectiveC; -using System.Security.Cryptography; -using System.Threading.Tasks.Dataflow; -using System.Xml.Linq; namespace ManagedObjectSize { @@ -92,8 +88,19 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? }; eval.Push(obj); + +#if FEATURE_STATISTICS + state.StartStatistics(); + state.UpdateEval(eval); +#endif + long totalSize = ProcessEvaluationStack(eval, state, out count); +#if FEATURE_STATISTICS + state.StopStatistics(); + state.DumpStatistics(totalSize); +#endif + if (options.DebugOutput) { state.Options.DebugWriter.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); @@ -107,6 +114,43 @@ private class EvaluationState public long StopTime { get; set; } public ObjectSizeOptions Options { get; set; } = null!; public HashSet Considered { get; set; } = null!; + +#if FEATURE_STATISTICS + private long m_started; + private long m_complete; + private int m_maxConsidered; + private int m_sampleMaxConsidered; + private int m_maxEval; + private int m_sampleMaxEval; + private int m_sampled; + private int m_notSampled; + private int m_arrays; + + public void DumpStatistics(long totalSize) + { +#pragma warning disable HAA0601 // Value type to reference type conversion causing boxing allocation + Options.DebugWriter.WriteLine("STATISTICS"); + Options.DebugWriter.WriteLine($" enabled options : {Options.GetEnabledString()}"); + Options.DebugWriter.WriteLine($" elapsed : {new TimeSpan(m_complete - m_started)}"); + Options.DebugWriter.WriteLine($" total size : {totalSize.ToString("N0")} bytes"); + Options.DebugWriter.WriteLine($" max seen/evaluated : {m_maxConsidered:N0}/{m_maxEval:N0}"); + Options.DebugWriter.WriteLine($" arrays : {m_arrays:N0}"); + Options.DebugWriter.WriteLine($" not sampled : {m_notSampled:N0}"); + Options.DebugWriter.WriteLine($" sampled : {m_sampled:N0}"); + Options.DebugWriter.WriteLine($" max seen/evaluated : {m_sampleMaxConsidered:N0}/{m_sampleMaxEval:N0}"); +#pragma warning restore HAA0601 // Value type to reference type conversion causing boxing allocation + } + + public void StartStatistics() => m_started = System.Diagnostics.Stopwatch.GetTimestamp(); + public void StopStatistics() => m_complete = System.Diagnostics.Stopwatch.GetTimestamp(); + public void UpdateConsidered() => m_maxConsidered = Math.Max(Considered.Count, m_maxConsidered); + public void UpdateSampleConsidered(HashSet considered) => m_sampleMaxConsidered = Math.Max(considered.Count, m_sampleMaxConsidered); + public void UpdateEval(Stack eval) => m_maxEval = Math.Max(eval.Count, m_maxEval); + public void UpdateSampleEval(Stack eval) => m_sampleMaxEval = Math.Max(eval.Count, 
m_sampleMaxEval); + public void UpdateSampled() => m_sampled++; + public void UpdateNotSampled() => m_notSampled++; + public void UpdateArrays() => m_arrays++; +#endif } private static unsafe long ProcessEvaluationStack(Stack eval, EvaluationState state, out long count) @@ -138,6 +182,10 @@ private static unsafe long ProcessEvaluationStack(Stack eval, Evaluation continue; } +#if FEATURE_STATISTICS + state.UpdateConsidered(); +#endif + var currentType = currentObject.GetType(); if (currentType == typeof(Pointer) || currentType.IsPointer) { @@ -198,93 +246,122 @@ private static unsafe void HandleArray(Stack eval, EvaluationState state var elementType = objType.GetElementType(); if (elementType != null && !elementType.IsPointer) { - (int sampleSize, int? populationSize) = GetSampleAndPopulateSize(state, obj, objType); - - if (sampleSize == 0) +#if FEATURE_STATISTICS + state.UpdateArrays(); +#endif + (int sampleSize, int? populationSize, bool always) = GetSampleAndPopulateSize(state, obj, objType); + + // Only sample if: + // - the "always" flag has not been set in options + // - we have determined an actual sample size + // - if the total number of elements in the array is not less than the sample size + if (!always && ( + sampleSize == 0 || + (populationSize != null && populationSize <= sampleSize) || + HasLessElements(obj, sampleSize, elementType)) + ) { HandleArrayNonSampled(eval, state, obj, elementType); - return; } - - // If we have less (actual, see comment about elementCount below) elements than the - // sample size. Use the non sampled approach. - // PopulationSize has already been determined, if confidence level based sampling is - // configured. In this case, reuse the value. - if ((populationSize != null && populationSize <= sampleSize) || - !HasMoreElements(obj, sampleSize)) + else { - HandleArrayNonSampled(eval, state, obj, elementType); - return; + HandleArraySampled(eval, state, obj, elementType, sampleSize); } + } + } - int elementCount = 0; + private static unsafe void HandleArraySampled(Stack eval, EvaluationState state, object obj, Type? elementType, int sampleSize) + { +#if FEATURE_STATISTICS + state.UpdateSampled(); +#endif - // TODO: Should these be from a pool? Measure if cost is too high allocating if we have - // a "large" number of arrays to sample. - var localEval = new Stack(); - var localConsidered = new HashSet(); + int elementCount = 0; - foreach (object element in (System.Collections.IEnumerable)obj) + // TODO: Should these be from a pool? Measure if cost is too high allocating if we have + // a "large" number of arrays to sample. + var localEval = new Stack(); + var localConsidered = new HashSet(); + + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (ShouldCountElement(element, elementType)) { - if (element != null) + // We're only counting the elements that are actually non-null. This might + // be less then the size of the array, when the array contains null elements. + // On the other hand, if we could every element, we also count excess elements. + // For example, the extra (unused) capacity of a List<>. + // Only considering non-null elements is still correct, however, because null + // elements don't contribute to the size. + elementCount++; + + if (elementCount <= sampleSize) { - // We're only counting the elements that are actually non-null. This might - // be less then the size of the array, when the array contains null elements. - // On the other hand, if we could every element, we also count excess elements. 
- // For example, the extra (unused) capacity of a List<>. - // Only considering non-null elements is still correct, however, because null - // elements don't contribute to the size. - elementCount++; - - if (elementCount <= sampleSize) + ulong elementAddr = (ulong)GetHeapPointer(element); + if (!localConsidered.Contains(elementAddr)) { - ulong elementAddr = (ulong)GetHeapPointer(element); - if (!localConsidered.Contains(elementAddr)) - { - HandleArrayElement(localEval, localConsidered, elementType, element); - localConsidered.Add(elementAddr); - } + HandleArrayElement(localEval, localConsidered, elementType, element); + localConsidered.Add(elementAddr); +#if FEATURE_STATISTICS + state.UpdateSampleConsidered(localConsidered); + state.UpdateSampleEval(localEval); +#endif } } } + } - if (localEval.Count > 0) - { - double sizeOfSamples = ProcessEvaluationStack(localEval, state, out _); - - var sample = new ArraySample - { - Size = (long)((sizeOfSamples / localConsidered.Count) * elementCount), - ElementCount = elementCount - }; + if (localEval.Count > 0) + { + double sizeOfSamples = ProcessEvaluationStack(localEval, state, out _); - eval.Push(sample); - } + var sample = new ArraySample + { + Size = (long)((sizeOfSamples / localConsidered.Count) * elementCount), + ElementCount = elementCount + }; + + eval.Push(sample); +#if FEATURE_STATISTICS + state.UpdateEval(eval); +#endif } } - private static unsafe (int SampleSize, int? PopulationSize) GetSampleAndPopulateSize(EvaluationState state, object obj, Type objType) + private static unsafe (int SampleSize, int? PopulationSize, bool Always) GetSampleAndPopulateSize(EvaluationState state, object obj, Type elementType) { - if (state.Options.ArraySampleCount != null) + if (state.Options.AlwaysUseArraySampleAlgorithm) { - return (state.Options.ArraySampleCount.Value, null); + int populationSize = CountNonNullElements(obj, elementType); + return (populationSize, populationSize, true); + } + else if (state.Options.ArraySampleCount != null) + { + int sampleSize = state.Options.ArraySampleCount.Value; + + if (state.Options.DebugOutput) + { + state.Options.DebugWriter.WriteLine($"array {GetHeapPointer(obj)}/{elementType}[]: sampleSize={sampleSize:N0}"); + } + + return (sampleSize, null, false); } else if (state.Options.ArraySampleConfidenceLevel != null) { // For size calculation we also only consider non-null elements, so here we have to do it as well. // If we wouldn't, the population size would be too big and the sample size thus too small. 
- int populationSize = CountNonNullElements(obj); + int populationSize = CountNonNullElements(obj, elementType); int sampleSize = Utils.CalculateSampleCount(state.Options.ArraySampleConfidenceLevel.Value, state.Options.ArraySampleConfidenceInterval, populationSize); if (state.Options.DebugOutput) { - state.Options.DebugWriter.WriteLine($"array {GetHeapPointer(obj)}/{objType}: population={populationSize:N0} sampleSize={sampleSize:N0}"); + state.Options.DebugWriter.WriteLine($"array {GetHeapPointer(obj)}/{elementType}[]: population={populationSize:N0} sampleSize={sampleSize:N0}"); } - return (sampleSize, populationSize); + return (sampleSize, populationSize, false); } - return (0, null); + return (0, null, false); } private static void AddRange(HashSet first, HashSet second) @@ -301,12 +378,20 @@ private class ArraySample public int ElementCount { get; set; } } - private static int CountNonNullElements(object obj) + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool ShouldCountElement(object element, Type elementType) => elementType.IsValueType || element != null; + + private static int CountNonNullElements(object obj, Type elementType) { + if (elementType.IsValueType) + { + return ((Array)obj).Length; + } + int count = 0; foreach (object element in (System.Collections.IEnumerable)obj) { - if (element != null) + if (ShouldCountElement(element, elementType)) { count++; } @@ -314,28 +399,32 @@ private static int CountNonNullElements(object obj) return count; } - private static bool HasMoreElements(object obj, int max) + private static bool HasLessElements(object obj, int max, Type elementType) { int count = 0; foreach (object element in (System.Collections.IEnumerable)obj) { - if (element != null) + if (ShouldCountElement(element, elementType)) { count++; - if (count > max) + if (count >= max) { - return true; + return false; } } } - return false; + return true; } private static unsafe void HandleArrayNonSampled(Stack eval, EvaluationState state, object obj, Type elementType) { +#if FEATURE_STATISTICS + state.UpdateNotSampled(); +#endif + foreach (object element in (System.Collections.IEnumerable)obj) { - if (element != null) + if (ShouldCountElement(element, elementType)) { HandleArrayElement(eval, state.Considered, elementType, element); } diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs index 661542e..44e2054 100644 --- a/src/ManagedObjectSize/ObjectSizeOptions.cs +++ b/src/ManagedObjectSize/ObjectSizeOptions.cs @@ -1,4 +1,5 @@  +using System.Text; using System.Threading; namespace ManagedObjectSize @@ -13,6 +14,7 @@ public class ObjectSizeOptions private TextWriter m_debugWriter = Console.Out; private double? m_arraySampleConfidenceLevel; private int m_arraySampleConfidenceInterval = 5; + private bool m_alwaysUseArraySampleAlgorithm; public CancellationToken CancellationToken { @@ -70,6 +72,20 @@ public bool UseRtHelpers } } + /// + /// EXPERIMENTAL/INTERNAL USE ONLY Gets or sets a value that causes - the potentially more expensive - + /// sample algorithm to be used for every array, regardless of the other settings concerning sampling. + /// + public bool AlwaysUseArraySampleAlgorithm + { + get => m_alwaysUseArraySampleAlgorithm; + set + { + CheckReadOnly(); + m_alwaysUseArraySampleAlgorithm = value; + } + } + /// /// Gets or sets a value that describes how many elements of an array should be checked at a maximum. 
/// If the array contains less elements than this value, the array is processed as if sampling would @@ -104,7 +120,7 @@ public int? ArraySampleCount } /// - /// Gets or sets a value that determines the sample size () based on a given + /// EXPERIMENTAL Gets or sets a value that determines the sample size () based on a given /// confidence level. /// If the array contains less elements than the calculated sample size, the array is processed as if sampling would /// not have been enabled. Also see the remarks section. @@ -133,6 +149,9 @@ public double? ArraySampleConfidenceLevel } } + /// + /// EXPERIMENTAL (see ). + /// public int ArraySampleConfidenceInterval { get => m_arraySampleConfidenceInterval; @@ -168,8 +187,9 @@ internal ObjectSizeOptions GetReadOnly() DebugOutput = m_debugOutput, UseRtHelpers = m_useRtHelpers, ArraySampleCount = m_arraySampleCount, - ArraySampleConfidenceInterval = m_arraySampleConfidenceInterval, ArraySampleConfidenceLevel = m_arraySampleConfidenceLevel, + ArraySampleConfidenceInterval = m_arraySampleConfidenceInterval, + AlwaysUseArraySampleAlgorithm = m_alwaysUseArraySampleAlgorithm, Timeout = m_timeout, CancellationToken = m_cancellationToken, DebugWriter = m_debugWriter, @@ -185,5 +205,73 @@ private void CheckReadOnly() throw new InvalidOperationException("Cannot change a read only instance"); } } + + public string GetEnabledString() + { + var sb = new StringBuilder(); + if (UseRtHelpers) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(UseRtHelpers)).Append("=true"); + } + if (ArraySampleCount != null) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(ArraySampleCount)).Append('=').Append(ArraySampleCount.Value.ToString("N0")); + } + if (ArraySampleConfidenceLevel != null) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(ArraySampleConfidenceLevel)).Append('=').Append(ArraySampleConfidenceLevel.Value); + sb.Append(' '); + sb.Append(nameof(ArraySampleConfidenceInterval)).Append('=').Append(ArraySampleConfidenceInterval); + } + if (AlwaysUseArraySampleAlgorithm) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(AlwaysUseArraySampleAlgorithm)).Append("=true"); + } + if (Timeout != null) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(Timeout)).Append('=').Append(Timeout.Value); + } + if (DebugOutput) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(DebugOutput)).Append("=true"); + } + + if (sb.Length == 0) + { + sb.Append("(default)"); + } + + return sb.ToString(); + } } } \ No newline at end of file diff --git a/src/ManagedObjectSize/Utils.cs b/src/ManagedObjectSize/Utils.cs index 22c05f6..4dd281c 100644 --- a/src/ManagedObjectSize/Utils.cs +++ b/src/ManagedObjectSize/Utils.cs @@ -11,7 +11,7 @@ public class Utils /// /// /// - internal static int CalculateSampleCount(double confidenceLevel, int confidenceInterval, int populationSize) + public static int CalculateSampleCount(double confidenceLevel, int confidenceInterval, int populationSize) { if (populationSize <= 0) { From c7ad056d3e874b2f81050c41f44f7ff8b0f43463 Mon Sep 17 00:00:00 2001 From: cnkz Date: Fri, 5 Jan 2024 18:10:05 +0100 Subject: [PATCH 05/11] Statistics as runtime option, not #if directive. 
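This commit turns statistics collection into a regular runtime option instead of a compile-time FEATURE_STATISTICS define. A hedged usage sketch combining the options added in this series follows; the sample data and the scale of the confidence level are assumptions for illustration, and the summary is written to DebugWriter (Console.Out by default).

    using System;
    using System.Linq;
    using ManagedObjectSize;

    var data = Enumerable.Range(0, 100_000).Select(i => i.ToString()).ToArray();

    var options = new ObjectSizeOptions
    {
        // Assumption: the level is passed as a fraction; check Utils.CalculateSampleCount
        // for the scale it actually expects.
        ArraySampleConfidenceLevel = 0.95,
        // Dump the run summary (elapsed time, arrays seen, sampled vs. not sampled, ...)
        // after the walk.
        CollectStatistics = true,
    };

    long size = ObjectSize.GetObjectInclusiveSize(data, options);
    Console.WriteLine($"approx. {size:N0} bytes");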
--- src/ManagedObjectSize/ObjectSize.cs | 1344 ++++++++++---------- src/ManagedObjectSize/ObjectSizeOptions.cs | 11 + src/ManagedObjectSize/Utils.cs | 2 +- src/SampleApp/Program.cs | 218 ++-- 4 files changed, 799 insertions(+), 776 deletions(-) diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 3f77cbc..256eefa 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -1,642 +1,654 @@ -//#define FEATURE_STATISTICS -using System.Diagnostics; -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -namespace ManagedObjectSize -{ - /// - /// Object memory size calculation. - /// - public static class ObjectSize - { - // - // Material: - // - https://github.com/dotnet/runtime/issues/24200 - // - https://devblogs.microsoft.com/premier-developer/managed-object-internals-part-1-layout/ - // - ClrMD ObjSize: algorithm (https://github.com/microsoft/clrmd) - // - https://github.com/dotnet/runtime: - // - https://github.com/dotnet/runtime/blob/074a01611837db63e9fe1d7462916d47ed858a75/src/coreclr/vm/object.h - // - https://github.com/dotnet/runtime/blob/074a01611837db63e9fe1d7462916d47ed858a75/src/coreclr/vm/methodtable.h - // - - /// - /// Calculates approximate memory size of object itself, not accounting for sizes of referenced objects. - /// - /// Object to calculate size of. - /// Approximate size of managed object. - public static long GetObjectExclusiveSize(object? obj) => GetObjectExclusiveSize(obj, null); - - /// - /// Calculates approximate memory size of object itself, not accounting for sizes of referenced objects. - /// - /// Object to calculate size of. - /// Options to apply during calculation. - /// Approximate size of managed object. - public static long GetObjectExclusiveSize(object? obj, ObjectSizeOptions? options) - { - options = (options ?? new()).GetReadOnly(); - - if (options.UseRtHelpers) - { - return GetObjectExclusiveSizeRtHelpers(obj); - } - - return GetObjectExclusiveSizeInternal(obj); - } - - /// - /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. - /// - /// Object to calculate size of. - /// Approximate size of managed object and its reference graph. - public static long GetObjectInclusiveSize(object? obj) => GetObjectInclusiveSize(obj, null, out _); - - /// - /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. - /// - /// Object to calculate size of. - /// Options to apply during calculation. - /// Approximate size of managed object and its reference graph. - public static long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? options) => GetObjectInclusiveSize(obj, options, out _); - - /// - /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. - /// - /// Object to calculate size of. - /// Options to apply during calculation. - /// Outputs the number of object references seen during calculation. - /// Approximate size of managed object and its reference graph. - /// The . has been canceled. - /// The has elapsed. - public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? options, out long count) - { - if (obj == null) - { - count = 0; - return 0; - } - - options = (options ?? 
new()).GetReadOnly(); - - var eval = new Stack(); - var state = new EvaluationState - { - Considered = new(ReferenceEqualityComparer.Instance), - StopTime = options.GetStopTime(Environment.TickCount64), - Options = options - }; - - eval.Push(obj); - - state.StartStatistics(); - state.UpdateEval(eval); - - long totalSize = ProcessEvaluationStack(eval, state, out count); - - state.StopStatistics(); - state.DumpStatistics(totalSize); - - if (options.DebugOutput) - { - state.Options.DebugWriter.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); - } - - return totalSize; - } - - private class EvaluationState - { - public long StopTime { get; set; } - public ObjectSizeOptions Options { get; set; } = null!; - public HashSet Considered { get; set; } = null!; - - private long m_started; - private long m_completed; - private int m_maxConsidered; - private int m_sampleMaxConsidered; - private int m_maxEval; - private int m_sampleMaxEval; - private int m_sampled; - private int m_notSampled; +using System.Diagnostics; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +namespace ManagedObjectSize +{ + /// + /// Object memory size calculation. + /// + public static class ObjectSize + { + // + // Material: + // - https://github.com/dotnet/runtime/issues/24200 + // - https://devblogs.microsoft.com/premier-developer/managed-object-internals-part-1-layout/ + // - ClrMD ObjSize: algorithm (https://github.com/microsoft/clrmd) + // - https://github.com/dotnet/runtime: + // - https://github.com/dotnet/runtime/blob/074a01611837db63e9fe1d7462916d47ed858a75/src/coreclr/vm/object.h + // - https://github.com/dotnet/runtime/blob/074a01611837db63e9fe1d7462916d47ed858a75/src/coreclr/vm/methodtable.h + // + + /// + /// Calculates approximate memory size of object itself, not accounting for sizes of referenced objects. + /// + /// Object to calculate size of. + /// Approximate size of managed object. + public static long GetObjectExclusiveSize(object? obj) => GetObjectExclusiveSize(obj, null); + + /// + /// Calculates approximate memory size of object itself, not accounting for sizes of referenced objects. + /// + /// Object to calculate size of. + /// Options to apply during calculation. + /// Approximate size of managed object. + public static long GetObjectExclusiveSize(object? obj, ObjectSizeOptions? options) + { + options = (options ?? new()).GetReadOnly(); + + if (options.UseRtHelpers) + { + return GetObjectExclusiveSizeRtHelpers(obj); + } + + return GetObjectExclusiveSizeInternal(obj); + } + + /// + /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. + /// + /// Object to calculate size of. + /// Approximate size of managed object and its reference graph. + public static long GetObjectInclusiveSize(object? obj) => GetObjectInclusiveSize(obj, null, out _); + + /// + /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. + /// + /// Object to calculate size of. + /// Options to apply during calculation. + /// Approximate size of managed object and its reference graph. + public static long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? options) => GetObjectInclusiveSize(obj, options, out _); + + /// + /// Calculates approximate memory size of object and its reference graph, recursively adding up sizes of referenced objects. + /// + /// Object to calculate size of. + /// Options to apply during calculation. 
+ /// Outputs the number of object references seen during calculation. + /// Approximate size of managed object and its reference graph. + /// The . has been canceled. + /// The has elapsed. + public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? options, out long count) + { + if (obj == null) + { + count = 0; + return 0; + } + + options = (options ?? new()).GetReadOnly(); + + var eval = new Stack(); + var state = new EvaluationState(options); + + eval.Push(obj); + + if (state.Statistics != null) + { + state.Statistics.Start(); + state.Statistics.UpdateEval(eval); + } + + long totalSize = ProcessEvaluationStack(eval, ref state, out count); + + if (state.Statistics != null) + { + state.Statistics.Stop(); + state.Statistics.Dump(totalSize); + } + + if (options.DebugOutput) + { + state.Options.DebugWriter.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); + } + + return totalSize; + } + + private class Statistics + { + private long m_started; + private long m_completed; + private int m_considered; + private int m_maxConsidered; + private int m_sampleMaxConsidered; + private int m_maxEval; + private int m_sampleMaxEval; + private int m_sampled; + private int m_notSampled; private int m_arrays; - - [Conditional("FEATURE_STATISTICS")] - public void DumpStatistics(long totalSize) - { - Options.DebugWriter.WriteLine("STATISTICS"); - Options.DebugWriter.WriteLine($" enabled options : {Options.GetEnabledString()}"); - Options.DebugWriter.WriteLine($" elapsed : {new TimeSpan(m_completed - m_started)}"); - Options.DebugWriter.WriteLine($" total size : {totalSize:N0} bytes"); - Options.DebugWriter.WriteLine($" max seen/evaluated : {m_maxConsidered:N0}/{m_maxEval:N0}"); - Options.DebugWriter.WriteLine($" arrays : {m_arrays:N0}"); - Options.DebugWriter.WriteLine($" not sampled : {m_notSampled:N0}"); - Options.DebugWriter.WriteLine($" sampled : {m_sampled:N0}"); - Options.DebugWriter.WriteLine($" max seen/evaluated : {m_sampleMaxConsidered:N0}/{m_sampleMaxEval:N0}"); - } - - [Conditional("FEATURE_STATISTICS")] - public void StartStatistics() => m_started = Stopwatch.GetTimestamp(); - [Conditional("FEATURE_STATISTICS")] - public void StopStatistics() => m_completed = Stopwatch.GetTimestamp(); - [Conditional("FEATURE_STATISTICS")] - public void UpdateConsidered() => m_maxConsidered = Math.Max(Considered.Count, m_maxConsidered); - [Conditional("FEATURE_STATISTICS")] - public void UpdateSampleConsidered(HashSet considered) => m_sampleMaxConsidered = Math.Max(considered.Count, m_sampleMaxConsidered); - [Conditional("FEATURE_STATISTICS")] - public void UpdateEval(Stack eval) => m_maxEval = Math.Max(eval.Count, m_maxEval); - [Conditional("FEATURE_STATISTICS")] - public void UpdateSampleEval(Stack eval) => m_sampleMaxEval = Math.Max(eval.Count, m_sampleMaxEval); - [Conditional("FEATURE_STATISTICS")] - public void UpdateSampled() => m_sampled++; - [Conditional("FEATURE_STATISTICS")] - public void UpdateNotSampled() => m_notSampled++; - [Conditional("FEATURE_STATISTICS")] - public void UpdateArrays() => m_arrays++; - } - - private static unsafe long ProcessEvaluationStack(Stack eval, EvaluationState state, out long count) - { - count = 0; - long totalSize = 0; - - while (eval.Count > 0) - { - // Check abort conditions. 
- state.Options.CancellationToken.ThrowIfCancellationRequested(); - if (state.StopTime != -1) - { - CheckStopTime(state.StopTime, totalSize, count, state.Options.Timeout); - } - - var currentObject = eval.Pop(); - - if (currentObject == null) - { - // Cannot get the size for a "null" object. - continue; - } - - if (!state.Considered.Add(currentObject)) - { - // Already seen this object. - continue; - } - - state.UpdateConsidered(); - - var currentType = currentObject.GetType(); - if (currentType == typeof(Pointer) || currentType.IsPointer) - { - // Pointers are not considered. - continue; - } - - long currSize; - if (currentObject is ArraySample arraySample) - { - currSize = arraySample.Size; - count += arraySample.ElementCount; - } - else - { - currSize = GetObjectExclusiveSize(currentObject, state.Options); - count++; - } - totalSize += currSize; - - if (state.Options.DebugOutput) - { - state.Options.DebugWriter.WriteLine($"[{count:N0}] {(totalSize - currSize):N0} -> {totalSize:N0} ({currSize:N0}: {currentObject.GetType()})"); - } - - if (currentType == typeof(string)) - { - // String is a special object type in the CLR. We have already recorded the correct length of it - // by using GetObjectExclusiveSize(). - continue; - } - - if (currentType.IsArray) - { - HandleArray(eval, state, currentObject, currentType); - } - else - { - AddFields(eval, state.Considered, currentObject, currentType); - } - } - - return totalSize; - } - - private static void CheckStopTime(long stopAt, long totalSize, long count, TimeSpan? timeout) - { - if (Environment.TickCount64 >= stopAt) - { - throw new TimeoutException( - $"The allotted time of {timeout} to determine the inclusive size of the object (graph) has passed. " + - $"The incomplete result so far is {totalSize:N0} bytes for processing {count:N0} objects. "); - } - } - - private static unsafe void HandleArray(Stack eval, EvaluationState state, object obj, Type objType) - { - var elementType = objType.GetElementType(); - if (elementType != null && !elementType.IsPointer) - { - state.UpdateArrays(); - - (int sampleSize, int? populationSize, bool always) = GetSampleAndPopulateSize(state, obj, objType); - - // Only sample if: - // - the "always" flag has not been set in options - // - we have determined an actual sample size - // - if the total number of elements in the array is not less than the sample size - if (!always && ( - sampleSize == 0 || - (populationSize != null && populationSize <= sampleSize) || - HasLessElements(obj, sampleSize, elementType)) - ) - { - HandleArrayNonSampled(eval, state, obj, elementType); - } - else - { - HandleArraySampled(eval, state, obj, elementType, sampleSize); - } - } - } - - private static unsafe void HandleArraySampled(Stack eval, EvaluationState state, object obj, Type? elementType, int sampleSize) - { - state.UpdateSampled(); - - int elementCount = 0; - - // TODO: Should these be from a pool? Measure if cost is too high allocating if we have - // a "large" number of arrays to sample. - var localEval = new Stack(); - var localConsidered = new HashSet(ReferenceEqualityComparer.Instance); - - foreach (object element in (System.Collections.IEnumerable)obj) - { - if (ShouldCountElement(element, elementType)) - { - // We're only counting the elements that are actually non-null. This might - // be less then the size of the array, when the array contains null elements. - // On the other hand, if we could every element, we also count excess elements. - // For example, the extra (unused) capacity of a List<>. 
- // Only considering non-null elements is still correct, however, because null - // elements don't contribute to the size. - elementCount++; - - if (elementCount <= sampleSize) - { - if (!localConsidered.Contains(element)) - { - HandleArrayElement(localEval, localConsidered, elementType, element); - localConsidered.Add(element); - - state.UpdateSampleConsidered(localConsidered); - state.UpdateSampleEval(localEval); - } - } - } - } - - if (localEval.Count > 0) - { - double sizeOfSamples = ProcessEvaluationStack(localEval, state, out _); - - var sample = new ArraySample - { - Size = (long)((sizeOfSamples / localConsidered.Count) * elementCount), - ElementCount = elementCount - }; - - eval.Push(sample); - - state.UpdateEval(eval); - } - } - - private static unsafe (int SampleSize, int? PopulationSize, bool Always) GetSampleAndPopulateSize(EvaluationState state, object obj, Type elementType) - { - if (state.Options.AlwaysUseArraySampleAlgorithm) - { - int populationSize = CountNonNullElements(obj, elementType); - return (populationSize, populationSize, true); - } - else if (state.Options.ArraySampleCount != null) - { - int sampleSize = state.Options.ArraySampleCount.Value; - - if (state.Options.DebugOutput) - { - state.Options.DebugWriter.WriteLine($"array {Utils.GetVolatileHeapPointer(obj)}/{elementType}[]: sampleSize={sampleSize:N0}"); - } - - return (sampleSize, null, false); - } - else if (state.Options.ArraySampleConfidenceLevel != null) - { - // For size calculation we also only consider non-null elements, so here we have to do it as well. - // If we wouldn't, the population size would be too big and the sample size thus too small. - int populationSize = CountNonNullElements(obj, elementType); - int sampleSize = Utils.CalculateSampleCount(state.Options.ArraySampleConfidenceLevel.Value, state.Options.ArraySampleConfidenceInterval, populationSize); - - if (state.Options.DebugOutput) - { - state.Options.DebugWriter.WriteLine($"array {Utils.GetVolatileHeapPointer(obj)}/{elementType}[]: population={populationSize:N0} sampleSize={sampleSize:N0}"); - } - - return (sampleSize, populationSize, false); - } - - return (0, null, false); - } - - private static void AddRange(HashSet first, HashSet second) - { - foreach (ulong s in second) - { - first.Add(s); - } - } - - private class ArraySample - { - public long Size { get; set; } - public int ElementCount { get; set; } - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - private static bool ShouldCountElement(object element, Type elementType) => elementType.IsValueType || element != null; - - private static int CountNonNullElements(object obj, Type elementType) - { - if (elementType.IsValueType) - { - return ((Array)obj).Length; - } - - int count = 0; - foreach (object element in (System.Collections.IEnumerable)obj) - { - if (ShouldCountElement(element, elementType)) - { - count++; - } - } - return count; - } - - private static bool HasLessElements(object obj, int max, Type elementType) - { - int count = 0; - foreach (object element in (System.Collections.IEnumerable)obj) - { - if (ShouldCountElement(element, elementType)) - { - count++; - if (count >= max) - { - return false; - } - } - } - return true; - } - - private static unsafe void HandleArrayNonSampled(Stack eval, EvaluationState state, object obj, Type elementType) - { - state.UpdateNotSampled(); - - foreach (object element in (System.Collections.IEnumerable)obj) - { - if (ShouldCountElement(element, elementType)) - { - HandleArrayElement(eval, state.Considered, elementType, 
element); - } - } - } - - private static unsafe void HandleArrayElement(Stack eval, HashSet considered, Type elementType, object element) - { - if (!elementType.IsValueType) - { - if (!considered.Contains(element)) - { - eval.Push(element); - } - } - else - { - AddFields(eval, considered, element, elementType); - } + private readonly ObjectSizeOptions m_options; + + public Statistics(ObjectSizeOptions options) + { + m_options = options; + } + + public void Start() => m_started = Stopwatch.GetTimestamp(); + public void Stop() => m_completed = Stopwatch.GetTimestamp(); + public void UpdateConsidered() => m_maxConsidered = Math.Max(++m_considered, m_maxConsidered); + public void UpdateSampleConsidered(HashSet considered) => m_sampleMaxConsidered = Math.Max(considered.Count, m_sampleMaxConsidered); + public void UpdateEval(Stack eval) => m_maxEval = Math.Max(eval.Count, m_maxEval); + public void UpdateSampleEval(Stack eval) => m_sampleMaxEval = Math.Max(eval.Count, m_sampleMaxEval); + public void UpdateSampled() => m_sampled++; + public void UpdateNotSampled() => m_notSampled++; + public void UpdateArrays() => m_arrays++; + + public void Dump(long totalSize) + { + m_options.DebugWriter.WriteLine("STATISTICS"); + m_options.DebugWriter.WriteLine($" enabled options : {m_options.GetEnabledString()}"); + m_options.DebugWriter.WriteLine($" elapsed : {new TimeSpan(m_completed - m_started)}"); + m_options.DebugWriter.WriteLine($" total size : {totalSize:N0} bytes"); + m_options.DebugWriter.WriteLine($" max seen/evaluated : {m_maxConsidered:N0}/{m_maxEval:N0}"); + m_options.DebugWriter.WriteLine($" arrays : {m_arrays:N0}"); + m_options.DebugWriter.WriteLine($" not sampled : {m_notSampled:N0}"); + m_options.DebugWriter.WriteLine($" sampled : {m_sampled:N0}"); + m_options.DebugWriter.WriteLine($" max seen/evaluated : {m_sampleMaxConsidered:N0}/{m_sampleMaxEval:N0}"); + } + } + + private readonly struct EvaluationState + { + public EvaluationState(ObjectSizeOptions options) + { + Options = options ?? throw new ArgumentNullException(nameof(options)); + StopTime = options.GetStopTime(Environment.TickCount64); + Considered = new HashSet(ReferenceEqualityComparer.Instance); + Statistics = options.CollectStatistics ? new(options) : null; + } + + public ObjectSizeOptions Options { get; } + public long StopTime { get; } + public HashSet Considered { get; } + public Statistics? Statistics { get; } + } + + private static unsafe long ProcessEvaluationStack(Stack eval, ref EvaluationState state, out long count) + { + count = 0; + long totalSize = 0; + + while (eval.Count > 0) + { + // Check abort conditions. + state.Options.CancellationToken.ThrowIfCancellationRequested(); + if (state.StopTime != -1) + { + CheckStopTime(state.StopTime, totalSize, count, state.Options.Timeout); + } + + var currentObject = eval.Pop(); + + if (currentObject == null) + { + // Cannot get the size for a "null" object. + continue; + } + + if (!state.Considered.Add(currentObject)) + { + // Already seen this object. + continue; + } + + state.Statistics?.UpdateConsidered(); + + var currentType = currentObject.GetType(); + if (currentType == typeof(Pointer) || currentType.IsPointer) + { + // Pointers are not considered. 
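                        // (Raw pointer values are plain addresses rather than managed object
                        //  references the walker could follow, so they are skipped here without
                        //  being sized or traversed.)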
+ continue; + } + + long currSize; + if (currentObject is ArraySample arraySample) + { + currSize = arraySample.Size; + count += arraySample.ElementCount; + } + else + { + currSize = GetObjectExclusiveSize(currentObject, state.Options); + count++; + } + totalSize += currSize; + + if (state.Options.DebugOutput) + { + state.Options.DebugWriter.WriteLine($"[{count:N0}] {(totalSize - currSize):N0} -> {totalSize:N0} ({currSize:N0}: {currentObject.GetType()})"); + } + + if (currentType == typeof(string)) + { + // String is a special object type in the CLR. We have already recorded the correct length of it + // by using GetObjectExclusiveSize(). + continue; + } + + if (currentType.IsArray) + { + HandleArray(eval, ref state, currentObject, currentType); + } + else + { + AddFields(eval, state.Considered, currentObject, currentType); + } + } + + return totalSize; + } + + private static void CheckStopTime(long stopAt, long totalSize, long count, TimeSpan? timeout) + { + if (Environment.TickCount64 >= stopAt) + { + throw new TimeoutException( + $"The allotted time of {timeout} to determine the inclusive size of the object (graph) has passed. " + + $"The incomplete result so far is {totalSize:N0} bytes for processing {count:N0} objects. "); + } + } + + private static unsafe void HandleArray(Stack eval, ref EvaluationState state, object obj, Type objType) + { + var elementType = objType.GetElementType(); + if (elementType != null && !elementType.IsPointer) + { + state.Statistics?.UpdateArrays(); + + (int sampleSize, int? populationSize, bool always) = GetSampleAndPopulateSize(ref state, obj, objType); + + // Only sample if: + // - the "always" flag has not been set in options + // - we have determined an actual sample size + // - if the total number of elements in the array is not less than the sample size + if (!always && ( + sampleSize == 0 || + (populationSize != null && populationSize <= sampleSize) || + HasLessElements(obj, sampleSize, elementType)) + ) + { + HandleArrayNonSampled(eval, ref state, obj, elementType); + } + else + { + HandleArraySampled(eval, ref state, obj, elementType, sampleSize); + } + } } - private static unsafe void AddFields(Stack eval, HashSet considered, object currentObject, Type objType) - { - foreach (var field in GetFields(objType)) - { - if (field.FieldType.IsValueType) - { - // Non reference type fields are "in place" in the actual type and thus are already included in - // GetObjectExclusiveSize(). This is also true for custom value types. However, the later might - // have reference type members. These need to be considered. So if the actual field we are dealing - // with is a value type, we search it (and all its fields) for reference type fields. If we haven't - // seen any of those before, we add it to be evaluated. - - var stack = new Stack(); - stack.Push(field.GetValue(currentObject)); - while (stack.Count > 0) - { - var currentValue = stack.Pop(); - if (currentValue == null) - { - continue; - } - - var fields = GetFields(currentValue.GetType()); - foreach (var f in fields) - { - object? value = f.GetValue(currentValue); - if (f.FieldType.IsValueType) - { - // Ignore primitive types (like System.Int32). Due to their - // nature (for example, System.Int32 has a field "m_value" of type - // System.Int32), they would lead to endless processing here. - if (!f.FieldType.IsPrimitive) - { - stack.Push(value); - } - } - else if (value != null) - { - // Found a reference type field/member inside the value type. 
- if (!considered.Contains(value) && !eval.Contains(value)) - { - eval.Push(value); - } - } - } - } - } - else - { - var fieldValue = field.GetValue(currentObject); - if (fieldValue != null) - { - if (!considered.Contains(fieldValue)) - { - eval.Push(fieldValue); - } - } - } - } - } - - private static IEnumerable GetFields(Type type) - { - foreach (var field in type.GetFields(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)) - { - yield return field; - } - - while (type.BaseType is not null) - { - foreach (var field in type.BaseType.GetFields(BindingFlags.NonPublic | BindingFlags.Instance)) - { - yield return field; - } - - type = type.BaseType; - } + private static unsafe void HandleArraySampled(Stack eval, ref EvaluationState state, object obj, Type elementType, int sampleSize) + { + state.Statistics?.UpdateSampled(); + + int elementCount = 0; + + // TODO: Should these be from a pool? Measure if cost is too high allocating if we have + // a "large" number of arrays to sample. + var localEval = new Stack(); + var localConsidered = new HashSet(ReferenceEqualityComparer.Instance); + + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (ShouldCountElement(element, elementType)) + { + // We're only counting the elements that are actually non-null. This might + // be less then the size of the array, when the array contains null elements. + // On the other hand, if we could every element, we also count excess elements. + // For example, the extra (unused) capacity of a List<>. + // Only considering non-null elements is still correct, however, because null + // elements don't contribute to the size. + elementCount++; + + if (elementCount <= sampleSize) + { + if (!localConsidered.Contains(element)) + { + HandleArrayElement(localEval, localConsidered, elementType, element); + localConsidered.Add(element); + + if (state.Statistics != null) + { + state.Statistics.UpdateSampleConsidered(localConsidered); + state.Statistics.UpdateSampleEval(localEval); + } + } + } + } + } + + if (localEval.Count > 0) + { + double sizeOfSamples = ProcessEvaluationStack(localEval, ref state, out _); + + var sample = new ArraySample + { + Size = (long)((sizeOfSamples / localConsidered.Count) * elementCount), + ElementCount = elementCount + }; + + eval.Push(sample); + + state.Statistics?.UpdateEval(eval); + } + } + + private static unsafe (int SampleSize, int? PopulationSize, bool Always) GetSampleAndPopulateSize(ref EvaluationState state, object obj, Type elementType) + { + if (state.Options.AlwaysUseArraySampleAlgorithm) + { + int populationSize = CountNonNullElements(obj, elementType); + return (populationSize, populationSize, true); + } + else if (state.Options.ArraySampleCount != null) + { + int sampleSize = state.Options.ArraySampleCount.Value; + + if (state.Options.DebugOutput) + { + state.Options.DebugWriter.WriteLine($"array {Utils.GetVolatileHeapPointer(obj)}/{elementType}[]: sampleSize={sampleSize:N0}"); + } + + return (sampleSize, null, false); + } + else if (state.Options.ArraySampleConfidenceLevel != null) + { + // For size calculation we also only consider non-null elements, so here we have to do it as well. + // If we wouldn't, the population size would be too big and the sample size thus too small. 
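                // (Rough orientation, assuming a Cochran-style calculation in Utils.CalculateSampleCount:
                //  100,000 non-null elements at a 95% confidence level with the default interval of 5
                //  comes out at roughly 380 to 400 samples; the exact figure depends on the implementation.)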
+ int populationSize = CountNonNullElements(obj, elementType); + int sampleSize = Utils.CalculateSampleCount(state.Options.ArraySampleConfidenceLevel.Value, state.Options.ArraySampleConfidenceInterval, populationSize); + + if (state.Options.DebugOutput) + { + state.Options.DebugWriter.WriteLine($"array {Utils.GetVolatileHeapPointer(obj)}/{elementType}[]: population={populationSize:N0} sampleSize={sampleSize:N0}"); + } + + return (sampleSize, populationSize, false); + } + + return (0, null, false); + } + + private static void AddRange(HashSet first, HashSet second) + { + foreach (ulong s in second) + { + first.Add(s); + } + } + + private class ArraySample + { + public long Size { get; set; } + public int ElementCount { get; set; } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool ShouldCountElement(object element, Type elementType) => elementType.IsValueType || element != null; + + private static int CountNonNullElements(object obj, Type elementType) + { + if (elementType.IsValueType) + { + return ((Array)obj).Length; + } + + int count = 0; + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (ShouldCountElement(element, elementType)) + { + count++; + } + } + return count; + } + + private static bool HasLessElements(object obj, int max, Type elementType) + { + int count = 0; + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (ShouldCountElement(element, elementType)) + { + count++; + if (count >= max) + { + return false; + } + } + } + return true; + } + + private static unsafe void HandleArrayNonSampled(Stack eval, ref EvaluationState state, object obj, Type elementType) + { + state.Statistics?.UpdateNotSampled(); + + foreach (object element in (System.Collections.IEnumerable)obj) + { + if (ShouldCountElement(element, elementType)) + { + HandleArrayElement(eval, state.Considered, elementType, element); + } + } + } + + private static unsafe void HandleArrayElement(Stack eval, HashSet considered, Type elementType, object element) + { + if (!elementType.IsValueType) + { + if (!considered.Contains(element)) + { + eval.Push(element); + } + } + else + { + AddFields(eval, considered, element, elementType); + } + } + + private static unsafe void AddFields(Stack eval, HashSet considered, object currentObject, Type objType) + { + foreach (var field in GetFields(objType)) + { + if (field.FieldType.IsValueType) + { + // Non reference type fields are "in place" in the actual type and thus are already included in + // GetObjectExclusiveSize(). This is also true for custom value types. However, the later might + // have reference type members. These need to be considered. So if the actual field we are dealing + // with is a value type, we search it (and all its fields) for reference type fields. If we haven't + // seen any of those before, we add it to be evaluated. + + var stack = new Stack(); + stack.Push(field.GetValue(currentObject)); + while (stack.Count > 0) + { + var currentValue = stack.Pop(); + if (currentValue == null) + { + continue; + } + + var fields = GetFields(currentValue.GetType()); + foreach (var f in fields) + { + object? value = f.GetValue(currentValue); + if (f.FieldType.IsValueType) + { + // Ignore primitive types (like System.Int32). Due to their + // nature (for example, System.Int32 has a field "m_value" of type + // System.Int32), they would lead to endless processing here. 
+ if (!f.FieldType.IsPrimitive) + { + stack.Push(value); + } + } + else if (value != null) + { + // Found a reference type field/member inside the value type. + if (!considered.Contains(value) && !eval.Contains(value)) + { + eval.Push(value); + } + } + } + } + } + else + { + var fieldValue = field.GetValue(currentObject); + if (fieldValue != null) + { + if (!considered.Contains(fieldValue)) + { + eval.Push(fieldValue); + } + } + } + } + } + + private static IEnumerable GetFields(Type type) + { + foreach (var field in type.GetFields(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)) + { + yield return field; + } + + while (type.BaseType is not null) + { + foreach (var field in type.BaseType.GetFields(BindingFlags.NonPublic | BindingFlags.Instance)) + { + yield return field; + } + + type = type.BaseType; + } + } + + // "Constants" are adapted from vm/object.h. + private static readonly uint ObjHeaderSize = (uint)IntPtr.Size; + private static readonly uint ObjSize = (uint)IntPtr.Size; + private static readonly uint ObjBaseSize = ObjHeaderSize + ObjSize; + private static readonly uint MinObjSize = (2 * (uint)IntPtr.Size) + ObjHeaderSize; + + // The CoreCLR provides an internal "RuntimeHelpers.GetRawObjectDataSize()" method. + // We don't want to use it by default, but allow calling it to compare results. + private delegate nuint GetRawObjectDataSize(object obj); + private static GetRawObjectDataSize? s_getRawObjectDataSize; + private static long GetObjectExclusiveSizeRtHelpers(object? obj) + { + if (obj == null) + { + return 0; + } + + var gros = LazyInitializer.EnsureInitialized(ref s_getRawObjectDataSize, () => + { + var bindingFlags = BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static; + var method = typeof(RuntimeHelpers).GetMethod("GetRawObjectDataSize", bindingFlags) + ?? throw new InvalidOperationException($"Method 'RuntimeHelpers.GetRawObjectDataSize()' not found"); + return (GetRawObjectDataSize)Delegate.CreateDelegate(typeof(GetRawObjectDataSize), method); + }); + + long size = (long)gros(obj); + // RuntimeHelpers.GetRawObjectDataSize strips off the "ObjectBaseSize", hence the name "Data". + // For our purposes we want it included. + size += ObjBaseSize; + + return size < MinObjSize ? MinObjSize : size; + } + + private static long GetObjectExclusiveSizeInternal(object? obj) + { + if (obj == null) + { + return 0; + } + + unsafe + { + var mt = GetMethodTable(obj); + long size = mt->BaseSize; + if (mt->HasComponentSize) + { + uint componentSize = mt->ComponentSize; + + if (componentSize > 0) + { + // Get number of components (strings and arrays) + int numComponents = checked((int)GetNumComponents(obj)); + + size += componentSize * numComponents; + } + } + + size = size < MinObjSize ? MinObjSize : size; + return size; + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static unsafe MethodTable* GetMethodTable(object obj) + { + // + // Get the MethodTable structure. The following code has been lifted from RuntimeHelpers.GetMethodTable(). + // + // In RuntimeHelpers, the method is internal, but the code is enlightening and especially so, when you look + // at the comment of the JIT instrinct that will be used instead of the actual implementation code below. + // + // (source: https://github.com/dotnet/runtime/blob/074a01611837db63e9fe1d7462916d47ed858a75/src/coreclr/vm/jitinterface.cpp#L7243): + // + // In the CLR, an object is laid out as follows. 
+ // [ object_header || MethodTable* (64-bit pointer) || instance_data ] + // ^ ^-- ref .firstField points here + // `-- reference (type O) points here + // + // [ snip more comment] + // + // Essentially, the ".firstField" part is what "GetRawData()" returns, we then go back by one (which is + // IntPtr.Size in bytes) to get the actual MethodTable*. + // + + return (MethodTable*)Unsafe.Add(ref Unsafe.As(ref GetRawData(obj)), -1); + + // IL (pseudo) code for what the JIT generates for the actual RuntimeHelpers.GetMethodTable() function + // (not for this one of course!) would be something like this: + // + // MethodTable* GetMethodTable(object obj) + // { + // ldarg_0 + // ldflda .firstField + // ldc_i4_s -IntPtr.Size + // add + // ldind_i + // ret + // } + // + // We could achieve the same using DynamicMethod and ILGenerator. However, the ".firstField" is what is + // tricky. The JIT can get this from internal CLR data structures, but for managed code it basically be "GetRawData()" again. + // So in the end we wouldn't have won too much by using IL. + // + // We could also just reflection invoke RuntimeHelpers.GetMethodTable(), but that is costly and relies on the method actually being + // there. The above approach also uses established information about objects are laid out and is thus more robust than + // invoking the internal RuntimeHelpers.GetMethodTable() method. + // + // + // Note: this works also + // + // return (MethodTable*)obj.GetType().TypeHandle.Value.ToPointer(); + // + // But since the CLR itself uses the above code internally, we rather stick with that. + } + + internal sealed class RawData + { + public byte Data; } - // "Constants" are adapted from vm/object.h. - private static readonly uint ObjHeaderSize = (uint)IntPtr.Size; - private static readonly uint ObjSize = (uint)IntPtr.Size; - private static readonly uint ObjBaseSize = ObjHeaderSize + ObjSize; - private static readonly uint MinObjSize = (2 * (uint)IntPtr.Size) + ObjHeaderSize; - - // The CoreCLR provides an internal "RuntimeHelpers.GetRawObjectDataSize()" method. - // We don't want to use it by default, but allow calling it to compare results. - private delegate nuint GetRawObjectDataSize(object obj); - private static GetRawObjectDataSize? s_getRawObjectDataSize; - private static long GetObjectExclusiveSizeRtHelpers(object? obj) - { - if (obj == null) - { - return 0; - } - - var gros = LazyInitializer.EnsureInitialized(ref s_getRawObjectDataSize, () => - { - var bindingFlags = BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static; - var method = typeof(RuntimeHelpers).GetMethod("GetRawObjectDataSize", bindingFlags) - ?? throw new InvalidOperationException($"Method 'RuntimeHelpers.GetRawObjectDataSize()' not found"); - return (GetRawObjectDataSize)Delegate.CreateDelegate(typeof(GetRawObjectDataSize), method); - }); - - long size = (long)gros(obj); - // RuntimeHelpers.GetRawObjectDataSize strips off the "ObjectBaseSize", hence the name "Data". - // For our purposes we want it included. - size += ObjBaseSize; - - return size < MinObjSize ? MinObjSize : size; - } - - private static long GetObjectExclusiveSizeInternal(object? 
obj) - { - if (obj == null) - { - return 0; - } - - unsafe - { - var mt = GetMethodTable(obj); - long size = mt->BaseSize; - if (mt->HasComponentSize) - { - uint componentSize = mt->ComponentSize; - - if (componentSize > 0) - { - // Get number of components (strings and arrays) - int numComponents = checked((int)GetNumComponents(obj)); - - size += componentSize * numComponents; - } - } - - size = size < MinObjSize ? MinObjSize : size; - return size; - } - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static unsafe MethodTable* GetMethodTable(object obj) - { - // - // Get the MethodTable structure. The following code has been lifted from RuntimeHelpers.GetMethodTable(). - // - // In RuntimeHelpers, the method is internal, but the code is enlightening and especially so, when you look - // at the comment of the JIT instrinct that will be used instead of the actual implementation code below. - // - // (source: https://github.com/dotnet/runtime/blob/074a01611837db63e9fe1d7462916d47ed858a75/src/coreclr/vm/jitinterface.cpp#L7243): - // - // In the CLR, an object is laid out as follows. - // [ object_header || MethodTable* (64-bit pointer) || instance_data ] - // ^ ^-- ref .firstField points here - // `-- reference (type O) points here - // - // [ snip more comment] - // - // Essentially, the ".firstField" part is what "GetRawData()" returns, we then go back by one (which is - // IntPtr.Size in bytes) to get the actual MethodTable*. - // - - return (MethodTable*)Unsafe.Add(ref Unsafe.As(ref GetRawData(obj)), -1); - - // IL (pseudo) code for what the JIT generates for the actual RuntimeHelpers.GetMethodTable() function - // (not for this one of course!) would be something like this: - // - // MethodTable* GetMethodTable(object obj) - // { - // ldarg_0 - // ldflda .firstField - // ldc_i4_s -IntPtr.Size - // add - // ldind_i - // ret - // } - // - // We could achieve the same using DynamicMethod and ILGenerator. However, the ".firstField" is what is - // tricky. The JIT can get this from internal CLR data structures, but for managed code it basically be "GetRawData()" again. - // So in the end we wouldn't have won too much by using IL. - // - // We could also just reflection invoke RuntimeHelpers.GetMethodTable(), but that is costly and relies on the method actually being - // there. The above approach also uses established information about objects are laid out and is thus more robust than - // invoking the internal RuntimeHelpers.GetMethodTable() method. - // - // - // Note: this works also - // - // return (MethodTable*)obj.GetType().TypeHandle.Value.ToPointer(); - // - // But since the CLR itself uses the above code internally, we rather stick with that. - } - - internal sealed class RawData - { - public byte Data; - } - internal static ref byte GetRawData(object obj) => ref Unsafe.As(obj).Data; internal class RawArrayData @@ -644,36 +656,36 @@ internal class RawArrayData public uint Length; // Array._numComponents padded to IntPtr } - internal static ref uint GetNumComponents(object obj) => ref Unsafe.As(obj).Length; - - [StructLayout(LayoutKind.Explicit)] - internal unsafe struct MethodTable - { - // According to src\vm\methodtable.h we have the following members in the MethodTable (that interest us here; - // there a more). - // - // Offset Size - // [0x0000] 4 DWORD m_dwFlags; // Low WORD is component size for array and string types when - // // (m_dwFlags & enum_flag_HasComponentSize)!=0; otherwise flags. 
- // [0x0004] 4 DWORD m_BaseSize; // Base size of instance of this class when allocated on the heap - // [0x0008] 2 WORD m_wFlags2; - // [0x000A] 2 WORD m_wToken; // Class token if it fits into 16-bits. - // [0x000C] 2 WORD m_wNumVirtuals; - // [0x000E] 2 WORD m_wNumInterfaces; - // - - // Put both fields at index 0; access ComponentSize for respective value if "HasComponentSize == true" - [FieldOffset(0)] - public ushort ComponentSize; - [FieldOffset(0)] - private uint Flags; - [FieldOffset(4)] - public uint BaseSize; - - private const uint enum_flag_ContainsPointers = 0x01000000; - private const uint enum_flag_HasComponentSize = 0x80000000; - public bool HasComponentSize => (Flags & enum_flag_HasComponentSize) != 0; - public bool ContainsPointers => (Flags & enum_flag_ContainsPointers) != 0; - } - } + internal static ref uint GetNumComponents(object obj) => ref Unsafe.As(obj).Length; + + [StructLayout(LayoutKind.Explicit)] + internal unsafe struct MethodTable + { + // According to src\vm\methodtable.h we have the following members in the MethodTable (that interest us here; + // there a more). + // + // Offset Size + // [0x0000] 4 DWORD m_dwFlags; // Low WORD is component size for array and string types when + // // (m_dwFlags & enum_flag_HasComponentSize)!=0; otherwise flags. + // [0x0004] 4 DWORD m_BaseSize; // Base size of instance of this class when allocated on the heap + // [0x0008] 2 WORD m_wFlags2; + // [0x000A] 2 WORD m_wToken; // Class token if it fits into 16-bits. + // [0x000C] 2 WORD m_wNumVirtuals; + // [0x000E] 2 WORD m_wNumInterfaces; + // + + // Put both fields at index 0; access ComponentSize for respective value if "HasComponentSize == true" + [FieldOffset(0)] + public ushort ComponentSize; + [FieldOffset(0)] + private uint Flags; + [FieldOffset(4)] + public uint BaseSize; + + private const uint enum_flag_ContainsPointers = 0x01000000; + private const uint enum_flag_HasComponentSize = 0x80000000; + public bool HasComponentSize => (Flags & enum_flag_HasComponentSize) != 0; + public bool ContainsPointers => (Flags & enum_flag_ContainsPointers) != 0; + } + } } \ No newline at end of file diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs index 44e2054..c9a1c03 100644 --- a/src/ManagedObjectSize/ObjectSizeOptions.cs +++ b/src/ManagedObjectSize/ObjectSizeOptions.cs @@ -7,6 +7,7 @@ namespace ManagedObjectSize public class ObjectSizeOptions { private bool m_debugOutput; + private bool m_collectStatistics; private bool m_useRtHelpers; private int? m_arraySampleCount; private TimeSpan? m_timeout; @@ -62,6 +63,16 @@ public bool DebugOutput } } + public bool CollectStatistics + { + get => m_collectStatistics; + set + { + CheckReadOnly(); + m_collectStatistics = value; + } + } + public bool UseRtHelpers { get => m_useRtHelpers; diff --git a/src/ManagedObjectSize/Utils.cs b/src/ManagedObjectSize/Utils.cs index 1c9c91c..1d8af2b 100644 --- a/src/ManagedObjectSize/Utils.cs +++ b/src/ManagedObjectSize/Utils.cs @@ -3,7 +3,7 @@ namespace ManagedObjectSize { - public class Utils + public static class Utils { /// /// Returns a pointer to the object on the managed heap. 
The resulting address is not fixed, diff --git a/src/SampleApp/Program.cs b/src/SampleApp/Program.cs index 631e7ed..c5ef86a 100644 --- a/src/SampleApp/Program.cs +++ b/src/SampleApp/Program.cs @@ -1,110 +1,110 @@ -using ManagedObjectSize; -using System.Diagnostics; - -namespace SampleApp -{ - internal class Program - { - static void Main(string[] args) - { - var sw = Stopwatch.StartNew(); - var graph = CreateObjectGraph(100_000_000, true); - sw.Stop(); - Console.WriteLine("Object created: " + sw.Elapsed); - Console.Out.Flush(); - - sw = Stopwatch.StartNew(); - long size = ObjectSize.GetObjectInclusiveSize(graph); - sw.Stop(); - Console.WriteLine("Full: " + size.ToString("N0") + " bytes : " + sw.Elapsed); - - sw = Stopwatch.StartNew(); - size = ObjectSize.GetObjectInclusiveSize(graph, new ObjectSizeOptions { - ArraySampleCount = 1000 - }); - sw.Stop(); - Console.WriteLine("Sample: " + size.ToString("N0") + " bytes : " + sw.Elapsed); - } - -#if false -Object created: 00:01:27.3333068 -10.377.777.676 bytes : 00:02:09.7285067 - -Object created: 00:00:54.2183866 -10.377.755.170 bytes : 00:01:23.4178055 - -Object created: 00:00:50.5841990 -10.278.925.504 bytes : 00:01:13.4623666 - -Object created: 00:02:39.7571474 -Full: 10.377.777.868 bytes : 00:02:20.4062759 -Sample: 800.085.782 bytes : 00:00:02.3662649 - -Object created: 00:02:27.7242993 -Full: 10.600.000.088 bytes : 00:02:29.2508853 -Sample: 800.097.990 bytes : 00:00:01.1667667 - -#endif - - private static GraphObject CreateObjectGraph(int num, bool inner = false) - { - var graph = new GraphObject - { - ListField = new List(num) - }; - - int digits = (int)Math.Log10(num) + 1; - var options = new ParallelOptions { MaxDegreeOfParallelism = inner ? 1 : Environment.ProcessorCount }; - Parallel.For(0, num, options, - () => new List(), - (i, state, local) => - { - //var node = new GraphNodeObject { StringField = "Node#" + i.ToString().PadRight(digits) }; - var node = new GraphNodeObject { StringField = "Node#" }; - if (!inner) - { - node.ObjectField = CreateObjectGraph(100, true); - } - local.Add(node); - return local; - }, - local => - { - lock (graph.ListField) - { - graph.ListField.AddRange(local); - } - }); - - //Parallel.For(0, num, i => - //{ - // var node = new GraphNodeObject { StringField = "Node#" + i }; - // if (!inner) - // { - // node.ObjectField = CreateObjectGraph(10_000, true); - // } - - // lock (graph.ListField) - // { - // graph.ListField.Add(node); - // } - //}); - - return graph; - } - - private class GraphObject - { - public int IntField; - public List ListField; - } - - private class GraphNodeObject - { - public double DoubleField; - public int IntField; - public string StringField; - public GraphObject ObjectField; - } - } +using ManagedObjectSize; +using System.Diagnostics; + +namespace SampleApp +{ + internal class Program + { + static void Main(string[] args) + { + var sw = Stopwatch.StartNew(); + var graph = CreateObjectGraph(100_000_000, true); + sw.Stop(); + Console.WriteLine("Object created: " + sw.Elapsed); + Console.Out.Flush(); + + sw = Stopwatch.StartNew(); + long size = ObjectSize.GetObjectInclusiveSize(graph); + sw.Stop(); + Console.WriteLine("Full: " + size.ToString("N0") + " bytes : " + sw.Elapsed); + + sw = Stopwatch.StartNew(); + size = ObjectSize.GetObjectInclusiveSize(graph, new ObjectSizeOptions { + ArraySampleCount = 1000 + }); + sw.Stop(); + Console.WriteLine("Sample: " + size.ToString("N0") + " bytes : " + sw.Elapsed); + } + +#if false +Object created: 00:01:27.3333068 +10.377.777.676 bytes : 
00:02:09.7285067 + +Object created: 00:00:54.2183866 +10.377.755.170 bytes : 00:01:23.4178055 + +Object created: 00:00:50.5841990 +10.278.925.504 bytes : 00:01:13.4623666 + +Object created: 00:02:39.7571474 +Full: 10.377.777.868 bytes : 00:02:20.4062759 +Sample: 800.085.782 bytes : 00:00:02.3662649 + +Object created: 00:02:27.7242993 +Full: 10.600.000.088 bytes : 00:02:29.2508853 +Sample: 800.097.990 bytes : 00:00:01.1667667 + +#endif + + private static GraphObject CreateObjectGraph(int num, bool inner = false) + { + var graph = new GraphObject + { + ListField = new List(num) + }; + + int digits = (int)Math.Log10(num) + 1; + var options = new ParallelOptions { MaxDegreeOfParallelism = inner ? 1 : Environment.ProcessorCount }; + Parallel.For(0, num, options, + () => new List(), + (i, state, local) => + { + //var node = new GraphNodeObject { StringField = "Node#" + i.ToString().PadRight(digits) }; + var node = new GraphNodeObject { StringField = "Node#" }; + if (!inner) + { + node.ObjectField = CreateObjectGraph(100, true); + } + local.Add(node); + return local; + }, + local => + { + lock (graph.ListField) + { + graph.ListField.AddRange(local); + } + }); + + //Parallel.For(0, num, i => + //{ + // var node = new GraphNodeObject { StringField = "Node#" + i }; + // if (!inner) + // { + // node.ObjectField = CreateObjectGraph(10_000, true); + // } + + // lock (graph.ListField) + // { + // graph.ListField.Add(node); + // } + //}); + + return graph; + } + + private class GraphObject + { + public int IntField; + public List ListField; + } + + private class GraphNodeObject + { + public double DoubleField; + public int IntField; + public string StringField; + public GraphObject ObjectField; + } + } } \ No newline at end of file From 96c80d62760d7382b4a1cc7f0c0e0d5772a33c31 Mon Sep 17 00:00:00 2001 From: cnkz Date: Sat, 6 Jan 2024 17:37:26 +0100 Subject: [PATCH 06/11] Ensure the MethodTable* is not unloaded prematurely --- src/ManagedObjectSize/ObjectSize.cs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 256eefa..d47723e 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -586,13 +586,16 @@ private static long GetObjectExclusiveSizeInternal(object? obj) } } + // Ensure that the MethodTable* "mt" of "obj" does not get unloaded, while we need it above. + GC.KeepAlive(obj); + size = size < MinObjSize ? MinObjSize : size; return size; } } [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static unsafe MethodTable* GetMethodTable(object obj) + private static unsafe MethodTable* GetMethodTable(object obj) { // // Get the MethodTable structure. The following code has been lifted from RuntimeHelpers.GetMethodTable(). 
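
Note on the GC.KeepAlive(obj) call introduced in the patch above: it matters in particular when the object's type lives in a collectible AssemblyLoadContext. Once the JIT sees no further use of obj, the object may be collected and, with it, the loader allocator that owns the type's MethodTable could be torn down while the raw MethodTable* is still being dereferenced. The following is only a sketch of the intended pattern, reusing the GetMethodTable helper and MethodTable struct defined in ObjectSize.cs; ReadBaseSize is a hypothetical name, not part of the patch.

    // Sketch: keep the managed object reachable until the last use of the raw MethodTable*.
    static unsafe long ReadBaseSize(object obj)
    {
        MethodTable* mt = GetMethodTable(obj);   // raw pointer into runtime data structures
        long size = mt->BaseSize;                // all dereferences happen before obj can be collected
        GC.KeepAlive(obj);                       // extends the lifetime of obj (and its type) past the pointer use
        return size;
    }
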
From 5fbbaaca8868e1e1b9077c6e91173b68c4a48504 Mon Sep 17 00:00:00 2001 From: cnkz Date: Sat, 6 Jan 2024 18:30:04 +0100 Subject: [PATCH 07/11] Fixes --- .../ObjectSizeTests.cs | 1048 ++++++++--------- src/ManagedObjectSize/ObjectSize.cs | 35 +- src/ManagedObjectSize/ObjectSizeOptions.cs | 10 + 3 files changed, 560 insertions(+), 533 deletions(-) diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs index 4ea4128..9a7e468 100644 --- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs +++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs @@ -1,350 +1,350 @@ -using System.Diagnostics; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; -using System.Text; -using Microsoft.Diagnostics.Runtime; - -namespace ManagedObjectSize.Tests -{ - [TestClass] - public class ObjectSizeTests - { - [TestMethod] - public void ObjectSize_AbortsIfCancellationIsRequested() - { - using (var cts = new CancellationTokenSource()) - { - cts.Cancel(); - - Assert.ThrowsException(() => - { - var options = new ObjectSizeOptions { CancellationToken = cts.Token }; - ObjectSize.GetObjectInclusiveSize("", options); - }); - } - } - - [TestMethod] - public void ObjectSize_UsesTimeoutIfConfigured() - { - Assert.ThrowsException(() => - { - // Shortest possible timeout is 1 tick. - // For any non-null object graph that should be small enough to actually trigger the - // timeout - hopefully. If we see spurious test failures here, we might need to re- - // check or provide some sort of mock support for the timeout calculation inside. - var options = new ObjectSizeOptions { Timeout = TimeSpan.FromTicks(1) }; - ObjectSize.GetObjectInclusiveSize(new ExampleHolder(), options); - }); - } - - [TestMethod] - public void ObjectSize_Null_ReturnsZero() - { - Assert.AreEqual(0, ObjectSize.GetObjectInclusiveSize(null)); - } - - [TestMethod] - public void ObjectSize_IsStable() - { - long size = ObjectSize.GetObjectInclusiveSize(CreateData()); - - for (int i = 0; i < 10; i++) - { - Assert.AreEqual(size, ObjectSize.GetObjectInclusiveSize(CreateData())); - } - - static object CreateData() => Enumerable.Repeat("all of same size", 100).ToList(); - } - - [TestMethod] - [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] - public void ObjectSize_ArrayReferences_Sampled(int sampleCount, int count) - { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); - - var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); - - // This *should* be true, because in our test data every element has the same size. - // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); - - static object CreateData(int count) - { - var result = new List(); - for (int i = 0; i < count; i++) - { - result.Add(new ExampleType()); - } - return result; - } - } - - [TestMethod] - [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] - public void ObjectSize_ArrayValueTypes_Sampled(int sampleCount, int count) - { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); - - var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); - - // This *should* be true, because in our test data every element has the same size. 
- // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); - - static object CreateData(int count) - { - var result = new List(); - for (int i = 0; i < count; i++) - { - result.Add(i); - } - return result; - } - } - - [TestMethod] - [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] - public void ObjectSize_ArrayReferenceWithValueTypeMember_Sampled(int sampleCount, int count) - { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); - - var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); - - // This *should* be true, because in our test data every element has the same size. - // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); - - static object CreateData(int count) - { - var result = new List(); - for (int i = 0; i < count; i++) - { - result.Add(new ExampleValue()); - } - return result; - } - } - - [TestMethod] - [DynamicData(nameof(GetWithStringSampleSizes), DynamicDataSourceType.Method)] - public void ObjectSize_ArrayReferenceWithStringMember_Sampled(bool equalStrings, int sampleCount, int count) - { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); - - var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); - - if (equalStrings) - { - // With equal strings the sampling will overestimate the amount of memory used, since - // it doesn't know that in the (not seen) elements some objects are all the same. - Assert.IsTrue(directSize <= sampledSize); - } - else - { - // This *should* be true, because in our test data every element has the same size. - // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); - } - - static object CreateData(bool equal, int count) - { - var result = new List(); - for (int i = 0; i < count; i++) - { - var obj = new ExampleHolder(); - obj.StringValue = equal ? "ccccc" : Guid.NewGuid().ToString(); - result.Add(obj); - } - return result; - } - } - - [TestMethod] - [DynamicData(nameof(GetWithStringSampleSizes), DynamicDataSourceType.Method)] - public void ObjectSize_ArrayStrings_Sampled(bool equalStrings, int sampleCount, int count) - { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); - - var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); - - if (equalStrings) - { - // With equal strings the sampling will overestimate the amount of memory used, since - // it doesn't know that in the (not seen) elements some objects are all the same. - Assert.IsTrue(directSize <= sampledSize); - } - else - { - // This *should* be true, because in our test data every element has the same size. - // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); - } - - static object CreateData(bool equal, int count) - { - var result = new List(); - for (int i = 0; i < count; i++) - { - result.Add(equal ? 
"ccccc" : Guid.NewGuid().ToString()); - } - return result; - } - } - - [TestMethod] - [DynamicData(nameof(GetWithStringSampleConfidences), DynamicDataSourceType.Method)] - public void ObjectSize_ArrayStrings_SampledWithConfidence(bool equalStrings, double confidenceLevel, int count) - { - long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); - var options = new ObjectSizeOptions { ArraySampleConfidenceLevel = confidenceLevel }; - long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); - - if (equalStrings) - { - // With equal strings the sampling will overestimate the amount of memory used, since - // it doesn't know that in the (not seen) elements some objects are all the same. - Assert.IsTrue(directSize <= sampledSize); - } - else - { - // This *should* be true, because in our test data every element has the same size. - // In real live scenarios, where elements may vary in size, this will not be true - // most of the time. - Assert.AreEqual(directSize, sampledSize); - } - - static object CreateData(bool equal, int count) - { - var result = new List(); - for (int i = 0; i < count; i++) - { - result.Add(equal ? "ccccc" : Guid.NewGuid().ToString()); - } - return result; - } - } - - // We could also use [DynamicData] to conduct the test of different objects/types, which would - // result in possibly better diagnostics for failed tests, continue running if one test fails, - // and report the "true" number of tests, not just 2 as it is now. - // Using this, however, would also mean that a snapshot (using ClrMD) would be created per - // object/type. While this is relatively cheap on Windows, it would cause much longer times - // on Linux (where PSS snapshots are not supported and a core dump is generated each time, - // spawning createdump.exe, reloading the temp, etc.). - - [DataTestMethod] - [DataRow(false)] - [DataRow(true)] - public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) - { - var data = new Dictionary(); - - // References are on stack and won't be moved by GC. - // So when we take their address for use in ClrMD code - // below, it should still be valid. 
- var empty = new Empty(); - var valueEmpty = new ValueEmpty(); - string @string = "Hello World"; - var exampleHolder = new ExampleHolder(); - var exampleHolder2 = new ExampleHolder2(); - var exampleHolder3 = new ExampleHolder3(); - var exampleHolder4 = new ExampleHolder4(); - var alignedDoubleSeq = new AlignedDoubleSequential(); - var alignedDoubleAuto = new AlignedDoubleAuto(); - var stringBuilder = new StringBuilder("Hello There"); - var selfRef = new SelfRef { Ref = new SelfRef() }; - selfRef.Ref.Ref = selfRef; - var withPointer = new TypeWithPointer { Ptr = (void*)Utils.GetVolatileHeapPointer(@string) }; - - var stringArray = new string[] { "ccccc", "ccccc", "ccccc", "ccccc", "ccccc", "ccccc" }; - var valueArray = new int[] { 1, 2, 3 }; - var valueRefArray = new[] { new ValueTypeWithRef("1"), new ValueTypeWithRef("1") }; - var refArray = new[] { new ExampleType(), new ExampleType() }; - var refWithDifferentStringsArray = new[] { new TypeWithStringRef("aaaaa"), new TypeWithStringRef("aaaaa") }; - var refWithSameStringsArray = new[] { new TypeWithStringRef("aaaaa"), new TypeWithStringRef("bbbbb") }; - var pointerArray = new void*[] { (void*)Utils.GetVolatileHeapPointer(@string), (void*)Utils.GetVolatileHeapPointer(empty) }; - var emptyValueArray = new int[] { }; - var emptyRefArray = new Empty[] { }; - var emptyValueRefArray = new ValueTypeWithRef[] { }; - var emptyPointerArray = new void*[] { }; - var jaggedArray = new int[10][]; - for (int i = 0; i < 10; i++) - { - jaggedArray[i] = new[] { 1, 2, 3, 4, 5 }; - } - var multiDimensionalArray = new int[,] - { - { 1, 2, 3, 4, 5 }, - { 1, 2, 3, 4, 5 }, - { 1, 2, 3, 4, 5 }, - { 1, 2, 3, 4, 5 } - }; - - string internedString1 = String.Intern("INTERNED"); - string internedString2 = String.Intern("INTERNED"); +using System.Diagnostics; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +using System.Text; +using Microsoft.Diagnostics.Runtime; + +namespace ManagedObjectSize.Tests +{ + [TestClass] + public class ObjectSizeTests + { + [TestMethod] + public void ObjectSize_AbortsIfCancellationIsRequested() + { + using (var cts = new CancellationTokenSource()) + { + cts.Cancel(); + + Assert.ThrowsException(() => + { + var options = new ObjectSizeOptions { CancellationToken = cts.Token }; + ObjectSize.GetObjectInclusiveSize("", options); + }); + } + } + + [TestMethod] + public void ObjectSize_UsesTimeoutIfConfigured() + { + Assert.ThrowsException(() => + { + // Shortest possible timeout is 1 tick. + // For any non-null object graph that should be small enough to actually trigger the + // timeout - hopefully. If we see spurious test failures here, we might need to re- + // check or provide some sort of mock support for the timeout calculation inside. 
+ var options = new ObjectSizeOptions { Timeout = TimeSpan.FromTicks(1) }; + ObjectSize.GetObjectInclusiveSize(new ExampleHolder(), options); + }); + } + + [TestMethod] + public void ObjectSize_Null_ReturnsZero() + { + Assert.AreEqual(0, ObjectSize.GetObjectInclusiveSize(null)); + } + + [TestMethod] + public void ObjectSize_IsStable() + { + long size = ObjectSize.GetObjectInclusiveSize(CreateData()); + + for (int i = 0; i < 10; i++) + { + Assert.AreEqual(size, ObjectSize.GetObjectInclusiveSize(CreateData())); + } + + static object CreateData() => Enumerable.Repeat("all of same size", 100).ToList(); + } + + [TestMethod] + [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayReferences_Sampled(int sampleCount, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + + static object CreateData(int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(new ExampleType()); + } + return result; + } + } + + [TestMethod] + [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayValueTypes_Sampled(int sampleCount, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + + static object CreateData(int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(i); + } + return result; + } + } + + [TestMethod] + [DynamicData(nameof(GetSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayReferenceWithValueTypeMember_Sampled(int sampleCount, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(count)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(count), options); + + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. 
+ Assert.AreEqual(directSize, sampledSize); + + static object CreateData(int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(new ExampleValue()); + } + return result; + } + } + + [TestMethod] + [DynamicData(nameof(GetWithStringSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayReferenceWithStringMember_Sampled(bool equalStrings, int sampleCount, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); + + if (equalStrings) + { + // With equal strings the sampling will overestimate the amount of memory used, since + // it doesn't know that in the (not seen) elements some objects are all the same. + Assert.IsTrue(directSize <= sampledSize); + } + else + { + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + } + + static object CreateData(bool equal, int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + var obj = new ExampleHolder(); + obj.StringValue = equal ? "ccccc" : Guid.NewGuid().ToString(); + result.Add(obj); + } + return result; + } + } + + [TestMethod] + [DynamicData(nameof(GetWithStringSampleSizes), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayStrings_Sampled(bool equalStrings, int sampleCount, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); + + var options = new ObjectSizeOptions { ArraySampleCount = sampleCount }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); + + if (equalStrings) + { + // With equal strings the sampling will overestimate the amount of memory used, since + // it doesn't know that in the (not seen) elements some objects are all the same. + Assert.IsTrue(directSize <= sampledSize); + } + else + { + // This *should* be true, because in our test data every element has the same size. + // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + } + + static object CreateData(bool equal, int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(equal ? "ccccc" : Guid.NewGuid().ToString()); + } + return result; + } + } + + [TestMethod] + [DynamicData(nameof(GetWithStringSampleConfidences), DynamicDataSourceType.Method)] + public void ObjectSize_ArrayStrings_SampledWithConfidence(bool equalStrings, double confidenceLevel, int count) + { + long directSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count)); + var options = new ObjectSizeOptions { ArraySampleConfidenceLevel = confidenceLevel }; + long sampledSize = ObjectSize.GetObjectInclusiveSize(CreateData(equalStrings, count), options); + + if (equalStrings) + { + // With equal strings the sampling will overestimate the amount of memory used, since + // it doesn't know that in the (not seen) elements some objects are all the same. + Assert.IsTrue(directSize <= sampledSize); + } + else + { + // This *should* be true, because in our test data every element has the same size. 
+ // In real live scenarios, where elements may vary in size, this will not be true + // most of the time. + Assert.AreEqual(directSize, sampledSize); + } + + static object CreateData(bool equal, int count) + { + var result = new List(); + for (int i = 0; i < count; i++) + { + result.Add(equal ? "ccccc" : Guid.NewGuid().ToString()); + } + return result; + } + } + + // We could also use [DynamicData] to conduct the test of different objects/types, which would + // result in possibly better diagnostics for failed tests, continue running if one test fails, + // and report the "true" number of tests, not just 2 as it is now. + // Using this, however, would also mean that a snapshot (using ClrMD) would be created per + // object/type. While this is relatively cheap on Windows, it would cause much longer times + // on Linux (where PSS snapshots are not supported and a core dump is generated each time, + // spawning createdump.exe, reloading the temp, etc.). + + [DataTestMethod] + [DataRow(false)] + [DataRow(true)] + public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) + { + var data = new Dictionary(); + + // References are on stack and won't be moved by GC. + // So when we take their address for use in ClrMD code + // below, it should still be valid. + var empty = new Empty(); + var valueEmpty = new ValueEmpty(); + string @string = "Hello World"; + var exampleHolder = new ExampleHolder(); + var exampleHolder2 = new ExampleHolder2(); + var exampleHolder3 = new ExampleHolder3(); + var exampleHolder4 = new ExampleHolder4(); + var alignedDoubleSeq = new AlignedDoubleSequential(); + var alignedDoubleAuto = new AlignedDoubleAuto(); + var stringBuilder = new StringBuilder("Hello There"); + var selfRef = new SelfRef { Ref = new SelfRef() }; + selfRef.Ref.Ref = selfRef; + var withPointer = new TypeWithPointer { Ptr = (void*)Utils.GetVolatileHeapPointer(@string) }; + + var stringArray = new string[] { "ccccc", "ccccc", "ccccc", "ccccc", "ccccc", "ccccc" }; + var valueArray = new int[] { 1, 2, 3 }; + var valueRefArray = new[] { new ValueTypeWithRef("1"), new ValueTypeWithRef("1") }; + var refArray = new[] { new ExampleType(), new ExampleType() }; + var refWithDifferentStringsArray = new[] { new TypeWithStringRef("aaaaa"), new TypeWithStringRef("aaaaa") }; + var refWithSameStringsArray = new[] { new TypeWithStringRef("aaaaa"), new TypeWithStringRef("bbbbb") }; + var pointerArray = new void*[] { (void*)Utils.GetVolatileHeapPointer(@string), (void*)Utils.GetVolatileHeapPointer(empty) }; + var emptyValueArray = new int[] { }; + var emptyRefArray = new Empty[] { }; + var emptyValueRefArray = new ValueTypeWithRef[] { }; + var emptyPointerArray = new void*[] { }; + var jaggedArray = new int[10][]; + for (int i = 0; i < 10; i++) + { + jaggedArray[i] = new[] { 1, 2, 3, 4, 5 }; + } + var multiDimensionalArray = new int[,] + { + { 1, 2, 3, 4, 5 }, + { 1, 2, 3, 4, 5 }, + { 1, 2, 3, 4, 5 }, + { 1, 2, 3, 4, 5 } + }; + + string internedString1 = String.Intern("INTERNED"); + string internedString2 = String.Intern("INTERNED"); var internedStrings = new string[] { internedString1, internedString2 }; - var privateBaseField = new WithPrivateBaseFieldType("Hello") { PublichBaseField = "Public" }; - var valueTypeWithRefs = (1, 2, (3, (4, new StringBuilder("hi")))); - - var options = new ObjectSizeOptions(); - options.UseRtHelpers = useRtHelpers; - //options.DebugOutput = true; - - GetSize(options, empty, data); - GetSize(options, valueEmpty, data); - GetSize(options, @string, data); - GetSize(options, 
exampleHolder, data); - GetSize(options, exampleHolder2, data); - GetSize(options, exampleHolder3, data); - GetSize(options, exampleHolder4, data); - GetSize(options, alignedDoubleSeq, data); - GetSize(options, alignedDoubleAuto, data); - GetSize(options, stringBuilder, data); - GetSize(options, selfRef, data); - GetSize(options, withPointer, data); - - GetSize(options, stringArray, data); - GetSize(options, valueArray, data); - GetSize(options, valueRefArray, data); - GetSize(options, refArray, data); - GetSize(options, refWithDifferentStringsArray, data); - GetSize(options, refWithSameStringsArray, data); - GetSize(options, pointerArray, data); - GetSize(options, emptyValueArray, data); - GetSize(options, emptyValueRefArray, data); - GetSize(options, emptyRefArray, data); - GetSize(options, emptyPointerArray, data); - GetSize(options, jaggedArray, data); - GetSize(options, multiDimensionalArray, data); - + var privateBaseField = new WithPrivateBaseFieldType("Hello") { PublichBaseField = "Public" }; + var valueTypeWithRefs = (1, 2, (3, (4, new StringBuilder("hi")))); + + var options = new ObjectSizeOptions(); + options.UseRtHelpers = useRtHelpers; + //options.DebugOutput = true; + + GetSize(options, empty, data); + GetSize(options, valueEmpty, data); + GetSize(options, @string, data); + GetSize(options, exampleHolder, data); + GetSize(options, exampleHolder2, data); + GetSize(options, exampleHolder3, data); + GetSize(options, exampleHolder4, data); + GetSize(options, alignedDoubleSeq, data); + GetSize(options, alignedDoubleAuto, data); + GetSize(options, stringBuilder, data); + GetSize(options, selfRef, data); + GetSize(options, withPointer, data); + + GetSize(options, stringArray, data); + GetSize(options, valueArray, data); + GetSize(options, valueRefArray, data); + GetSize(options, refArray, data); + GetSize(options, refWithDifferentStringsArray, data); + GetSize(options, refWithSameStringsArray, data); + GetSize(options, pointerArray, data); + GetSize(options, emptyValueArray, data); + GetSize(options, emptyValueRefArray, data); + GetSize(options, emptyRefArray, data); + GetSize(options, emptyPointerArray, data); + GetSize(options, jaggedArray, data); + GetSize(options, multiDimensionalArray, data); + GetSize(options, internedStrings, data); GetSize(options, privateBaseField, data); - GetSize(options, valueTypeWithRefs, data); - - using (var dt = DataTarget.CreateSnapshotAndAttach(Environment.ProcessId)) - { - using (var runtime = dt.ClrVersions.Single().CreateRuntime()) - { - foreach (ulong address in data.Keys) - { - string currentName = data[address].Name; - - var clrObj = runtime.Heap.GetObject(address); - + GetSize(options, valueTypeWithRefs, data); + + using (var dt = DataTarget.CreateSnapshotAndAttach(Environment.ProcessId)) + { + using (var runtime = dt.ClrVersions.Single().CreateRuntime()) + { + foreach (ulong address in data.Keys) + { + string currentName = data[address].Name; + + var clrObj = runtime.Heap.GetObject(address); + // Sanity check that address (still) refers to something valid. 
Assert.IsTrue(clrObj.IsValid, currentName + " IsValid"); @@ -355,62 +355,62 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) (int count, ulong inclusiveSize, ulong exclusiveSize) = ObjSize(clrObj, options.DebugOutput); Assert.AreEqual((long)inclusiveSize, data[address].InclusiveSize, currentName + " InclusiveSize"); Assert.AreEqual((long)exclusiveSize, data[address].ExclusiveSize, currentName + " ExclusiveSize"); - Assert.AreEqual(count, data[address].Count, currentName + " Count"); - } - } - } - } - - private static void GetSize(ObjectSizeOptions options, object obj, - Dictionary sizes, - [CallerArgumentExpression("obj")] string? name = null) - { - long exclusiveSize = ObjectSize.GetObjectExclusiveSize(obj, options); - long inclusiveSize = ObjectSize.GetObjectInclusiveSize(obj, options, out long count); - - ulong address = (ulong)Utils.GetVolatileHeapPointer(obj); - - sizes.Add(address, (name!, obj.GetType(), count, exclusiveSize, inclusiveSize)); - } - - private static (int count, ulong size, ulong excSize) ObjSize(ClrObject input, bool debugOutput) - { - var considered = new HashSet() { input }; - var stack = new Stack(100); - stack.Push(input); - - int count = 0; - ulong totalSize = 0; - - while (stack.Count > 0) - { - var curr = stack.Pop(); - - count++; - totalSize += curr.Size; - - if (debugOutput) - { - Console.WriteLine($"[CLRMD] [{count:N0}] {(totalSize - curr.Size):N0} -> {totalSize:N0} ({curr.Size:N0}: {curr.Type})"); - } - - foreach (var obj in curr.EnumerateReferences(carefully: false, considerDependantHandles: false)) - { - if (considered.Add(obj)) - { - stack.Push(obj); - } - } - } - - if (debugOutput) - { - Console.WriteLine($"[CLRMD] total: {totalSize:N0} ({input.Type})"); - } - - return (count, totalSize, input.Size); - } - + Assert.AreEqual(count, data[address].Count, currentName + " Count"); + } + } + } + } + + private static void GetSize(ObjectSizeOptions options, object obj, + Dictionary sizes, + [CallerArgumentExpression("obj")] string? 
name = null) + { + long exclusiveSize = ObjectSize.GetObjectExclusiveSize(obj, options); + long inclusiveSize = ObjectSize.GetObjectInclusiveSize(obj, options, out long count); + + ulong address = (ulong)Utils.GetVolatileHeapPointer(obj); + + sizes.Add(address, (name!, obj.GetType(), count, exclusiveSize, inclusiveSize)); + } + + private static (int count, ulong size, ulong excSize) ObjSize(ClrObject input, bool debugOutput) + { + var considered = new HashSet() { input }; + var stack = new Stack(100); + stack.Push(input); + + int count = 0; + ulong totalSize = 0; + + while (stack.Count > 0) + { + var curr = stack.Pop(); + + count++; + totalSize += curr.Size; + + if (debugOutput) + { + Console.WriteLine($"[CLRMD] [{count:N0}] {(totalSize - curr.Size):N0} -> {totalSize:N0} ({curr.Size:N0}: {curr.Type})"); + } + + foreach (var obj in curr.EnumerateReferences(carefully: false, considerDependantHandles: false)) + { + if (considered.Add(obj)) + { + stack.Push(obj); + } + } + } + + if (debugOutput) + { + Console.WriteLine($"[CLRMD] total: {totalSize:N0} ({input.Type})"); + } + + return (count, totalSize, input.Size); + } + private static string GetClrMDLikeTypeName(Type type) { var sb = new StringBuilder(); @@ -486,131 +486,131 @@ private static void GetClrMDLikeTypeName(Type type, StringBuilder sb) } sb.Append(']'); } - } - - - private static readonly int[] s_sampleSizesFor100 = new[] { 2, 5, 10, 50, 75, 99, 100, 101 }; - - private static IEnumerable GetWithStringSampleSizes() - { - foreach (var size in s_sampleSizesFor100) - { - yield return new object[] { true, size, 100 }; - } - - foreach (var size in s_sampleSizesFor100) - { - yield return new object[] { false, size, 100 }; - } - } - - private static IEnumerable GetSampleSizes() - { - foreach (var size in s_sampleSizesFor100) - { - yield return new object[] { size, 123 }; - } - } - - private static readonly double[] s_sampleConfidences = new[] { 0.9, 0.95, 0.99 }; - - private static IEnumerable GetWithStringSampleConfidences() - { - foreach (var confidenceLevel in s_sampleConfidences) - { - yield return new object[] { true, confidenceLevel, 10_000 }; - } - - foreach (var confidenceLevel in s_sampleConfidences) - { - yield return new object[] { false, confidenceLevel, 10_000 }; - } - } - - private class Empty { } - private struct ValueEmpty { } - - [StructLayout(LayoutKind.Auto)] - private struct AlignedDoubleAuto - { - public byte B; - public double D; - public int I; - } - - [StructLayout(LayoutKind.Sequential)] - private struct AlignedDoubleSequential - { - public byte B; - public double D; - public int I; - } - - private class ExampleHolder - { - public string StringValue = "A string value"; - } - - private class ExampleHolder2 - { - public string StringValue1 = "A string value one"; - public string StringValue2 = "A string value number two"; - public ExampleValue ExampleValue = new(); - } - - private class ExampleHolder3 - { - public string StringValue = "A string value"; - public ExampleType ExampleType = new(); - } - - private class ExampleHolder4 - { - public FileAccess EnumValue = FileAccess.Read; - } - - private class ExampleType - { - } - - private struct ExampleValue - { - public ExampleValue() - { - } - - public int Int32Value1 = 1; - public int Int32Value2 = 2; - } - - private struct ValueTypeWithRef - { - public ValueTypeWithRef(string s) - { - Value = s; - } - public string Value; - } - - private class TypeWithStringRef - { - public TypeWithStringRef(string s) - { - Value = s; - } - public string Value; - } - - private class 
SelfRef - { - public SelfRef? Ref; - } - - private unsafe class TypeWithPointer - { - public void* Ptr; - } - + } + + + private static readonly int[] s_sampleSizesFor100 = new[] { 2, 5, 10, 50, 75, 99, 100, 101 }; + + private static IEnumerable GetWithStringSampleSizes() + { + foreach (var size in s_sampleSizesFor100) + { + yield return new object[] { true, size, 100 }; + } + + foreach (var size in s_sampleSizesFor100) + { + yield return new object[] { false, size, 100 }; + } + } + + private static IEnumerable GetSampleSizes() + { + foreach (var size in s_sampleSizesFor100) + { + yield return new object[] { size, 123 }; + } + } + + private static readonly double[] s_sampleConfidences = new[] { 0.9, 0.95, 0.99 }; + + private static IEnumerable GetWithStringSampleConfidences() + { + foreach (var confidenceLevel in s_sampleConfidences) + { + yield return new object[] { true, confidenceLevel, 10_000 }; + } + + foreach (var confidenceLevel in s_sampleConfidences) + { + yield return new object[] { false, confidenceLevel, 10_000 }; + } + } + + private class Empty { } + private struct ValueEmpty { } + + [StructLayout(LayoutKind.Auto)] + private struct AlignedDoubleAuto + { + public byte B; + public double D; + public int I; + } + + [StructLayout(LayoutKind.Sequential)] + private struct AlignedDoubleSequential + { + public byte B; + public double D; + public int I; + } + + private class ExampleHolder + { + public string StringValue = "A string value"; + } + + private class ExampleHolder2 + { + public string StringValue1 = "A string value one"; + public string StringValue2 = "A string value number two"; + public ExampleValue ExampleValue = new(); + } + + private class ExampleHolder3 + { + public string StringValue = "A string value"; + public ExampleType ExampleType = new(); + } + + private class ExampleHolder4 + { + public FileAccess EnumValue = FileAccess.Read; + } + + private class ExampleType + { + } + + private struct ExampleValue + { + public ExampleValue() + { + } + + public int Int32Value1 = 1; + public int Int32Value2 = 2; + } + + private struct ValueTypeWithRef + { + public ValueTypeWithRef(string s) + { + Value = s; + } + public string Value; + } + + private class TypeWithStringRef + { + public TypeWithStringRef(string s) + { + Value = s; + } + public string Value; + } + + private class SelfRef + { + public SelfRef? Ref; + } + + private unsafe class TypeWithPointer + { + public void* Ptr; + } + private class BaseType { public BaseType(string s) @@ -628,6 +628,6 @@ public WithPrivateBaseFieldType(string s) : base(s) { } - } - } + } + } } \ No newline at end of file diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index d47723e..68055c5 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -454,13 +454,18 @@ private static unsafe void AddFields(Stack eval, HashSet conside { foreach (var field in GetFields(objType)) { + // Non reference type fields are "in place" in the actual type and thus are already included in + // GetObjectExclusiveSize(). This is also true for custom value types. However, the later might + // have reference type members. These need to be considered. So if the actual field we are dealing + // with is a value type, we search it (and all its fields) for reference type fields. If we haven't + // seen any of those before, we add it to be evaluated. 
+ if (field.FieldType.IsValueType) { - // Non reference type fields are "in place" in the actual type and thus are already included in - // GetObjectExclusiveSize(). This is also true for custom value types. However, the later might - // have reference type members. These need to be considered. So if the actual field we are dealing - // with is a value type, we search it (and all its fields) for reference type fields. If we haven't - // seen any of those before, we add it to be evaluated. + if (!IsReferenceOrContainsReferences(field.FieldType)) + { + continue; + } var stack = new Stack(); stack.Push(field.GetValue(currentObject)); @@ -478,10 +483,7 @@ private static unsafe void AddFields(Stack eval, HashSet conside object? value = f.GetValue(currentValue); if (f.FieldType.IsValueType) { - // Ignore primitive types (like System.Int32). Due to their - // nature (for example, System.Int32 has a field "m_value" of type - // System.Int32), they would lead to endless processing here. - if (!f.FieldType.IsPrimitive) + if (IsReferenceOrContainsReferences(f.FieldType)) { stack.Push(value); } @@ -647,6 +649,21 @@ private static long GetObjectExclusiveSizeInternal(object? obj) // But since the CLR itself uses the above code internally, we rather stick with that. } + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private unsafe static bool IsReferenceOrContainsReferences(Type type) + { + // Related to RuntimeHelpers.IsReferenceOrContainsReferences<>, but here we need to use a System.Type and + // not a generic parameter. Hence the following is equivalent to calling: + // + // return (bool)typeof(RuntimeHelpers).GetMethod("IsReferenceOrContainsReferences").MakeGenericMethod(type).Invoke(null, null); + // + // Also, using this way to get the MethodTable, because GetMethodTable() requires a reference of that type. + + bool result = !type.IsValueType || ((MethodTable*)type.TypeHandle.Value.ToPointer())->ContainsPointers; + GC.KeepAlive(type); + return result; + } + internal sealed class RawData { public byte Data; diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs index c9a1c03..fcdc5fc 100644 --- a/src/ManagedObjectSize/ObjectSizeOptions.cs +++ b/src/ManagedObjectSize/ObjectSizeOptions.cs @@ -204,6 +204,7 @@ internal ObjectSizeOptions GetReadOnly() Timeout = m_timeout, CancellationToken = m_cancellationToken, DebugWriter = m_debugWriter, + CollectStatistics = m_collectStatistics, IsReadOnly = true }; return result; @@ -276,6 +277,15 @@ public string GetEnabledString() sb.Append(nameof(DebugOutput)).Append("=true"); } + if (CollectStatistics) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(CollectStatistics)).Append("=true"); + } if (sb.Length == 0) { From a9d9cae6597d483fc57c21ef39241cd01657601a Mon Sep 17 00:00:00 2001 From: cnkz Date: Sat, 6 Jan 2024 18:35:02 +0100 Subject: [PATCH 08/11] Fixes --- src/ManagedObjectSize/ObjectSize.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 68055c5..46f9dd9 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -464,6 +464,7 @@ private static unsafe void AddFields(Stack eval, HashSet conside { if (!IsReferenceOrContainsReferences(field.FieldType)) { + // Value type contains no further reference type fields. continue; } @@ -483,6 +484,7 @@ private static unsafe void AddFields(Stack eval, HashSet conside object? 
value = f.GetValue(currentValue); if (f.FieldType.IsValueType) { + // Check if field's type contains further reference type fields. if (IsReferenceOrContainsReferences(f.FieldType)) { stack.Push(value); From fcd95f8281581368485c38a5f265083b0a001990 Mon Sep 17 00:00:00 2001 From: cnkz Date: Sun, 7 Jan 2024 16:46:29 +0100 Subject: [PATCH 09/11] Add Benchmarks --- .../ArraySamplingBenchmarks.cs | 91 +++++++++++++++++++ .../ManagedObjectSize.Benchmarks.csproj | 18 ++++ src/ManagedObjectSize.Benchmarks/Program.cs | 9 ++ .../ObjectSizeTests.cs | 2 +- src/ManagedObjectSize.sln | 80 ++++++++-------- src/SampleApp/Program.cs | 8 +- 6 files changed, 167 insertions(+), 41 deletions(-) create mode 100644 src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs create mode 100644 src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj create mode 100644 src/ManagedObjectSize.Benchmarks/Program.cs diff --git a/src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs b/src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs new file mode 100644 index 0000000..399c321 --- /dev/null +++ b/src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs @@ -0,0 +1,91 @@ +using BenchmarkDotNet.Attributes; +using BenchmarkDotNet.Diagnostics.Windows.Configs; + +namespace ManagedObjectSize.Benchmarks +{ + [MemoryDiagnoser, EtwProfiler] + public class ArraySamplingBenchmarks + { + [Params(20, 100)] public int N; + + private GraphObject m_graphData = null!; + private int[] m_intData = null!; + private string[] m_stringData = null!; + + private ObjectSizeOptions m_samplingOptions = null!; + + [GlobalSetup] + public void GlobalSetup() + { + m_graphData = CreateObjectGraph(N); + m_intData = new int[N]; + m_stringData = new string[N]; + + for (int i = 0; i < N; i++) + { + m_intData[i] = i; + m_stringData[i] = "string#" + i; + } + + m_samplingOptions = new() { ArraySampleCount = N / 10 }; + } + + [Benchmark] public long NoSampling_Int32() => ObjectSize.GetObjectInclusiveSize(m_intData); + [Benchmark] public long NoSampling_String() => ObjectSize.GetObjectInclusiveSize(m_stringData); + [Benchmark] public long NoSampling_Graph() => ObjectSize.GetObjectInclusiveSize(m_graphData); + + [Benchmark] public long Sampling_Int32() => ObjectSize.GetObjectInclusiveSize(m_intData, m_samplingOptions); + [Benchmark] public long Sampling_String() => ObjectSize.GetObjectInclusiveSize(m_stringData, m_samplingOptions); + [Benchmark] public long Sampling_Graph() => ObjectSize.GetObjectInclusiveSize(m_graphData, m_samplingOptions); + + // --------------------------------------------------------------------------------------- + + private static GraphObject CreateObjectGraph(int num, bool inner = false) + { + var graph = new GraphObject + { + ListField = new List(num) + }; + + int digits = (int)Math.Log10(num) + 1; + var options = new ParallelOptions { MaxDegreeOfParallelism = (inner || num < 100) ? 
1 : Environment.ProcessorCount }; + Parallel.For(0, num, options, + () => new List(), + (i, state, local) => + { + var node = new GraphNodeObject { StringField = "Node#" }; + if (!inner) + { + node.ObjectField = CreateObjectGraph(100, true); + } + local.Add(node); + return local; + }, + local => + { + lock (graph.ListField) + { + graph.ListField.AddRange(local); + } + }); + + return graph; + } + +#pragma warning disable CS0649 + + private class GraphObject + { + public int IntField; + public List ListField = null!; + } + + private class GraphNodeObject + { + public double DoubleField; + public int IntField; + public string StringField = null!; + public GraphObject ObjectField = null!; + } + } +} diff --git a/src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj b/src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj new file mode 100644 index 0000000..c4323e3 --- /dev/null +++ b/src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj @@ -0,0 +1,18 @@ + + + + Exe + net8.0 + enable + enable + + + + + + + + + + + diff --git a/src/ManagedObjectSize.Benchmarks/Program.cs b/src/ManagedObjectSize.Benchmarks/Program.cs new file mode 100644 index 0000000..b34af6a --- /dev/null +++ b/src/ManagedObjectSize.Benchmarks/Program.cs @@ -0,0 +1,9 @@ +using BenchmarkDotNet.Running; + +namespace ManagedObjectSize.Benchmarks +{ + internal class Program + { + public static void Main(string[] args) => BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args); + } +} diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs index 9a7e468..aa1fe27 100644 --- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs +++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs @@ -619,7 +619,7 @@ public BaseType(string s) } private string m_privateBaseField; - public string PublichBaseField; + public string PublichBaseField = null!; public override string ToString() => m_privateBaseField; } private class WithPrivateBaseFieldType : BaseType diff --git a/src/ManagedObjectSize.sln b/src/ManagedObjectSize.sln index 7d1733c..d76db4a 100644 --- a/src/ManagedObjectSize.sln +++ b/src/ManagedObjectSize.sln @@ -1,37 +1,43 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.4.33205.214 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ManagedObjectSize", "ManagedObjectSize\ManagedObjectSize.csproj", "{05847415-B5F4-4998-B6AB-011BC434A46C}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ManagedObjectSize.Tests", "ManagedObjectSize.Tests\ManagedObjectSize.Tests.csproj", "{F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SampleApp", "SampleApp\SampleApp.csproj", "{1805F1BA-EB96-47F3-99F4-7B35F1613679}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Release|Any CPU = Release|Any CPU - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {05847415-B5F4-4998-B6AB-011BC434A46C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {05847415-B5F4-4998-B6AB-011BC434A46C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {05847415-B5F4-4998-B6AB-011BC434A46C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {05847415-B5F4-4998-B6AB-011BC434A46C}.Release|Any CPU.Build.0 = Release|Any CPU - {F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - 
{F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Release|Any CPU.ActiveCfg = Release|Any CPU - {F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Release|Any CPU.Build.0 = Release|Any CPU - {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Release|Any CPU.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {EDB31B9A-E707-444C-952C-3B039CA17B63} - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.4.33205.214 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ManagedObjectSize", "ManagedObjectSize\ManagedObjectSize.csproj", "{05847415-B5F4-4998-B6AB-011BC434A46C}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ManagedObjectSize.Tests", "ManagedObjectSize.Tests\ManagedObjectSize.Tests.csproj", "{F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SampleApp", "SampleApp\SampleApp.csproj", "{1805F1BA-EB96-47F3-99F4-7B35F1613679}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ManagedObjectSize.Benchmarks", "ManagedObjectSize.Benchmarks\ManagedObjectSize.Benchmarks.csproj", "{F5535CC5-2FC0-4594-878D-DCEAF4D66C06}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {05847415-B5F4-4998-B6AB-011BC434A46C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {05847415-B5F4-4998-B6AB-011BC434A46C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {05847415-B5F4-4998-B6AB-011BC434A46C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {05847415-B5F4-4998-B6AB-011BC434A46C}.Release|Any CPU.Build.0 = Release|Any CPU + {F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F1B089A0-FF8D-4CD9-97FE-29826DAFEB91}.Release|Any CPU.Build.0 = Release|Any CPU + {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1805F1BA-EB96-47F3-99F4-7B35F1613679}.Release|Any CPU.Build.0 = Release|Any CPU + {F5535CC5-2FC0-4594-878D-DCEAF4D66C06}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F5535CC5-2FC0-4594-878D-DCEAF4D66C06}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F5535CC5-2FC0-4594-878D-DCEAF4D66C06}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F5535CC5-2FC0-4594-878D-DCEAF4D66C06}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {EDB31B9A-E707-444C-952C-3B039CA17B63} + EndGlobalSection +EndGlobal diff --git a/src/SampleApp/Program.cs 
b/src/SampleApp/Program.cs index c5ef86a..12dd84c 100644 --- a/src/SampleApp/Program.cs +++ b/src/SampleApp/Program.cs @@ -93,18 +93,20 @@ private static GraphObject CreateObjectGraph(int num, bool inner = false) return graph; } +#pragma warning disable CS0649 + private class GraphObject { public int IntField; - public List ListField; + public List ListField = null!; } private class GraphNodeObject { public double DoubleField; public int IntField; - public string StringField; - public GraphObject ObjectField; + public string StringField = null!; + public GraphObject ObjectField = null!; } } } \ No newline at end of file From 79c7c989a576034293e9f45d3a0596b25dab3f92 Mon Sep 17 00:00:00 2001 From: cnkz Date: Sun, 7 Jan 2024 18:44:23 +0100 Subject: [PATCH 10/11] Use object pooling. --- .../ArraySamplingBenchmarks.cs | 52 +-------------- src/ManagedObjectSize.Benchmarks/GraphData.cs | 50 ++++++++++++++ .../ObjectPoolBenchmarks.cs | 28 ++++++++ .../ManagedObjectSize.Tests.csproj | 2 +- .../ObjectSizeTests.cs | 66 +++++++++++++------ .../ManagedObjectSize.csproj | 4 ++ src/ManagedObjectSize/ObjectSize.cs | 50 ++++++++------ src/ManagedObjectSize/ObjectSizeOptions.cs | 60 +++++++++++++++++ 8 files changed, 219 insertions(+), 93 deletions(-) create mode 100644 src/ManagedObjectSize.Benchmarks/GraphData.cs create mode 100644 src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs diff --git a/src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs b/src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs index 399c321..c4861be 100644 --- a/src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs +++ b/src/ManagedObjectSize.Benchmarks/ArraySamplingBenchmarks.cs @@ -17,7 +17,7 @@ public class ArraySamplingBenchmarks [GlobalSetup] public void GlobalSetup() { - m_graphData = CreateObjectGraph(N); + m_graphData = GraphObject.CreateObjectGraph(N); m_intData = new int[N]; m_stringData = new string[N]; @@ -37,55 +37,5 @@ public void GlobalSetup() [Benchmark] public long Sampling_Int32() => ObjectSize.GetObjectInclusiveSize(m_intData, m_samplingOptions); [Benchmark] public long Sampling_String() => ObjectSize.GetObjectInclusiveSize(m_stringData, m_samplingOptions); [Benchmark] public long Sampling_Graph() => ObjectSize.GetObjectInclusiveSize(m_graphData, m_samplingOptions); - - // --------------------------------------------------------------------------------------- - - private static GraphObject CreateObjectGraph(int num, bool inner = false) - { - var graph = new GraphObject - { - ListField = new List(num) - }; - - int digits = (int)Math.Log10(num) + 1; - var options = new ParallelOptions { MaxDegreeOfParallelism = (inner || num < 100) ? 
1 : Environment.ProcessorCount }; - Parallel.For(0, num, options, - () => new List(), - (i, state, local) => - { - var node = new GraphNodeObject { StringField = "Node#" }; - if (!inner) - { - node.ObjectField = CreateObjectGraph(100, true); - } - local.Add(node); - return local; - }, - local => - { - lock (graph.ListField) - { - graph.ListField.AddRange(local); - } - }); - - return graph; - } - -#pragma warning disable CS0649 - - private class GraphObject - { - public int IntField; - public List ListField = null!; - } - - private class GraphNodeObject - { - public double DoubleField; - public int IntField; - public string StringField = null!; - public GraphObject ObjectField = null!; - } } } diff --git a/src/ManagedObjectSize.Benchmarks/GraphData.cs b/src/ManagedObjectSize.Benchmarks/GraphData.cs new file mode 100644 index 0000000..a069ecb --- /dev/null +++ b/src/ManagedObjectSize.Benchmarks/GraphData.cs @@ -0,0 +1,50 @@ +namespace ManagedObjectSize.Benchmarks +{ + internal class GraphObject + { + public static GraphObject CreateObjectGraph(int num, bool inner = false) + { + var graph = new GraphObject + { + ListField = new List(num) + }; + + int digits = (int)Math.Log10(num) + 1; + var options = new ParallelOptions { MaxDegreeOfParallelism = (inner || num < 100) ? 1 : Environment.ProcessorCount }; + Parallel.For(0, num, options, + () => new List(), + (i, state, local) => + { + var node = new GraphNodeObject { StringField = "Node#" }; + if (!inner) + { + node.ObjectField = CreateObjectGraph(100, true); + } + local.Add(node); + return local; + }, + local => + { + lock (graph.ListField) + { + graph.ListField.AddRange(local); + } + }); + + return graph; + } + +#pragma warning disable CS0649 + + public int IntField; + public List ListField = null!; + + public class GraphNodeObject + { + public double DoubleField; + public int IntField; + public string StringField = null!; + public GraphObject ObjectField = null!; + } + } +} diff --git a/src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs b/src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs new file mode 100644 index 0000000..81321e5 --- /dev/null +++ b/src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs @@ -0,0 +1,28 @@ +using BenchmarkDotNet.Attributes; +using Microsoft.Extensions.ObjectPool; + +namespace ManagedObjectSize.Benchmarks +{ + [MemoryDiagnoser] + public class ObjectPoolBenchmarks + { + [Params(100, 1000)] public int N; + + private GraphObject m_graphData = null!; + private ObjectSizeOptions m_options = null!; + + [GlobalSetup] + public void GlobalSetup() + { + m_graphData = GraphObject.CreateObjectGraph(N); + m_options = new ObjectSizeOptions + { + PoolProvider = new DefaultObjectPoolProvider() + }; + } + + [Benchmark] public long NoPool() => ObjectSize.GetObjectInclusiveSize(m_graphData); + + [Benchmark] public long Pool() => ObjectSize.GetObjectInclusiveSize(m_graphData, m_options); + } +} diff --git a/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj b/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj index 5acd8a3..5793873 100644 --- a/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj +++ b/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj @@ -10,7 +10,7 @@ - + diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs index aa1fe27..73a7bbc 100644 --- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs +++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs @@ -3,6 +3,7 @@ using System.Runtime.InteropServices; using System.Text; 
using Microsoft.Diagnostics.Runtime; +using Microsoft.Extensions.ObjectPool; namespace ManagedObjectSize.Tests { @@ -244,9 +245,10 @@ static object CreateData(bool equal, int count) // spawning createdump.exe, reloading the temp, etc.). [DataTestMethod] - [DataRow(false)] - [DataRow(true)] - public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) + [DataRow(false, false)] + [DataRow(false, true)] + [DataRow(true, false)] + public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers, bool useObjectPool) { var data = new Dictionary(); @@ -301,6 +303,17 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) var options = new ObjectSizeOptions(); options.UseRtHelpers = useRtHelpers; //options.DebugOutput = true; + if (useObjectPool) + { + options.PoolProvider = new DefaultObjectPoolProvider(); + } + + // We require the addresses of the test objects to not change. We determine the address during GetSize() + // and need it to stay the same until we have created a memory snapshot. + if (!GC.TryStartNoGCRegion(100_000_000)) + { + throw new InvalidOperationException("Failed to start no GC region"); + } GetSize(options, empty, data); GetSize(options, valueEmpty, data); @@ -314,7 +327,6 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) GetSize(options, stringBuilder, data); GetSize(options, selfRef, data); GetSize(options, withPointer, data); - GetSize(options, stringArray, data); GetSize(options, valueArray, data); GetSize(options, valueRefArray, data); @@ -328,34 +340,48 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers) GetSize(options, emptyPointerArray, data); GetSize(options, jaggedArray, data); GetSize(options, multiDimensionalArray, data); - GetSize(options, internedStrings, data); - GetSize(options, privateBaseField, data); - GetSize(options, valueTypeWithRefs, data); using (var dt = DataTarget.CreateSnapshotAndAttach(Environment.ProcessId)) { + // Got the snapshot. Release GC. + GC.EndNoGCRegion(); + using (var runtime = dt.ClrVersions.Single().CreateRuntime()) { + Assert.IsTrue(runtime.Heap.CanWalkHeap); + foreach (ulong address in data.Keys) { string currentName = data[address].Name; - var clrObj = runtime.Heap.GetObject(address); - - // Sanity check that address (still) refers to something valid. - Assert.IsTrue(clrObj.IsValid, currentName + " IsValid"); - - // Make sure we are not comparing apples and oranges. - Assert.AreEqual(clrObj.Type?.ToString(), GetClrMDLikeTypeName(data[address].Type), currentName + " Type"); - - // Compare actual sizes - (int count, ulong inclusiveSize, ulong exclusiveSize) = ObjSize(clrObj, options.DebugOutput); - Assert.AreEqual((long)inclusiveSize, data[address].InclusiveSize, currentName + " InclusiveSize"); - Assert.AreEqual((long)exclusiveSize, data[address].ExclusiveSize, currentName + " ExclusiveSize"); - Assert.AreEqual(count, data[address].Count, currentName + " Count"); + try + { + var clrObj = runtime.Heap.GetObject(address); + + // Sanity check that address (still) refers to something valid. This could fail if the object address + // changed in between GetSize() and CreateSnapshotAndAttach(). + Assert.IsTrue(clrObj.IsValid, currentName + " IsValid"); + + // Make sure we are not comparing apples and oranges. 
+ Assert.AreEqual(clrObj.Type?.ToString(), GetClrMDLikeTypeName(data[address].Type), currentName + " Type"); + + // Compare actual sizes + (int count, ulong inclusiveSize, ulong exclusiveSize) = ObjSize(clrObj, options.DebugOutput); + Assert.AreEqual((long)inclusiveSize, data[address].InclusiveSize, currentName + " InclusiveSize"); + Assert.AreEqual((long)exclusiveSize, data[address].ExclusiveSize, currentName + " ExclusiveSize"); + Assert.AreEqual(count, data[address].Count, currentName + " Count"); + } + catch (UnitTestAssertException) + { + throw; + } + catch (Exception ex) + { + throw new Exception($"Handling {currentName}: " + ex.Message, ex); + } } } } diff --git a/src/ManagedObjectSize/ManagedObjectSize.csproj b/src/ManagedObjectSize/ManagedObjectSize.csproj index a1308d3..1e274c5 100644 --- a/src/ManagedObjectSize/ManagedObjectSize.csproj +++ b/src/ManagedObjectSize/ManagedObjectSize.csproj @@ -35,4 +35,8 @@ + + + + diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 46f9dd9..59cb88b 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -1,4 +1,5 @@ -using System.Diagnostics; +using Microsoft.Extensions.ObjectPool; +using System.Diagnostics; using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -79,7 +80,7 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? options = (options ?? new()).GetReadOnly(); - var eval = new Stack(); + var eval = options.CreateStack(); var state = new EvaluationState(options); eval.Push(obj); @@ -103,6 +104,9 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? state.Options.DebugWriter.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); } + options.Return(state.Considered); + options.Return(eval); + return totalSize; } @@ -129,8 +133,8 @@ public Statistics(ObjectSizeOptions options) public void Stop() => m_completed = Stopwatch.GetTimestamp(); public void UpdateConsidered() => m_maxConsidered = Math.Max(++m_considered, m_maxConsidered); public void UpdateSampleConsidered(HashSet considered) => m_sampleMaxConsidered = Math.Max(considered.Count, m_sampleMaxConsidered); - public void UpdateEval(Stack eval) => m_maxEval = Math.Max(eval.Count, m_maxEval); - public void UpdateSampleEval(Stack eval) => m_sampleMaxEval = Math.Max(eval.Count, m_sampleMaxEval); + public void UpdateEval(Stack eval) => m_maxEval = Math.Max(eval.Count, m_maxEval); + public void UpdateSampleEval(Stack eval) => m_sampleMaxEval = Math.Max(eval.Count, m_sampleMaxEval); public void UpdateSampled() => m_sampled++; public void UpdateNotSampled() => m_notSampled++; public void UpdateArrays() => m_arrays++; @@ -155,7 +159,7 @@ public EvaluationState(ObjectSizeOptions options) { Options = options ?? throw new ArgumentNullException(nameof(options)); StopTime = options.GetStopTime(Environment.TickCount64); - Considered = new HashSet(ReferenceEqualityComparer.Instance); + Considered = options.CreateHashSet(); Statistics = options.CollectStatistics ? new(options) : null; } @@ -165,7 +169,7 @@ public EvaluationState(ObjectSizeOptions options) public Statistics? 
Statistics { get; } } - private static unsafe long ProcessEvaluationStack(Stack eval, ref EvaluationState state, out long count) + private static unsafe long ProcessEvaluationStack(Stack eval, ref EvaluationState state, out long count) { count = 0; long totalSize = 0; @@ -233,7 +237,7 @@ private static unsafe long ProcessEvaluationStack(Stack eval, ref Evalua } else { - AddFields(eval, state.Considered, currentObject, currentType); + AddFields(eval, ref state, currentObject, currentType); } } @@ -250,7 +254,7 @@ private static void CheckStopTime(long stopAt, long totalSize, long count, TimeS } } - private static unsafe void HandleArray(Stack eval, ref EvaluationState state, object obj, Type objType) + private static unsafe void HandleArray(Stack eval, ref EvaluationState state, object obj, Type objType) { var elementType = objType.GetElementType(); if (elementType != null && !elementType.IsPointer) @@ -278,7 +282,7 @@ private static unsafe void HandleArray(Stack eval, ref EvaluationState s } } - private static unsafe void HandleArraySampled(Stack eval, ref EvaluationState state, object obj, Type elementType, int sampleSize) + private static unsafe void HandleArraySampled(Stack eval, ref EvaluationState state, object obj, Type elementType, int sampleSize) { state.Statistics?.UpdateSampled(); @@ -286,8 +290,8 @@ private static unsafe void HandleArraySampled(Stack eval, ref Evaluation // TODO: Should these be from a pool? Measure if cost is too high allocating if we have // a "large" number of arrays to sample. - var localEval = new Stack(); - var localConsidered = new HashSet(ReferenceEqualityComparer.Instance); + var localEval = state.Options.CreateStack(); + var localConsidered = state.Options.CreateHashSet(); foreach (object element in (System.Collections.IEnumerable)obj) { @@ -305,7 +309,7 @@ private static unsafe void HandleArraySampled(Stack eval, ref Evaluation { if (!localConsidered.Contains(element)) { - HandleArrayElement(localEval, localConsidered, elementType, element); + HandleArrayElement(localEval, ref state, elementType, element); localConsidered.Add(element); if (state.Statistics != null) @@ -332,6 +336,9 @@ private static unsafe void HandleArraySampled(Stack eval, ref Evaluation state.Statistics?.UpdateEval(eval); } + + state.Options.Return(localEval); + state.Options.Return(localConsidered); } private static unsafe (int SampleSize, int? 
PopulationSize, bool Always) GetSampleAndPopulateSize(ref EvaluationState state, object obj, Type elementType) @@ -422,7 +429,7 @@ private static bool HasLessElements(object obj, int max, Type elementType) return true; } - private static unsafe void HandleArrayNonSampled(Stack eval, ref EvaluationState state, object obj, Type elementType) + private static unsafe void HandleArrayNonSampled(Stack eval, ref EvaluationState state, object obj, Type elementType) { state.Statistics?.UpdateNotSampled(); @@ -430,27 +437,27 @@ private static unsafe void HandleArrayNonSampled(Stack eval, ref Evaluat { if (ShouldCountElement(element, elementType)) { - HandleArrayElement(eval, state.Considered, elementType, element); + HandleArrayElement(eval, ref state, elementType, element); } } } - private static unsafe void HandleArrayElement(Stack eval, HashSet considered, Type elementType, object element) + private static unsafe void HandleArrayElement(Stack eval, ref EvaluationState state, Type elementType, object element) { if (!elementType.IsValueType) { - if (!considered.Contains(element)) + if (!state.Considered.Contains(element)) { eval.Push(element); } } else { - AddFields(eval, considered, element, elementType); + AddFields(eval, ref state, element, elementType); } } - private static unsafe void AddFields(Stack eval, HashSet considered, object currentObject, Type objType) + private static unsafe void AddFields(Stack eval, ref EvaluationState state, object currentObject, Type objType) { foreach (var field in GetFields(objType)) { @@ -468,7 +475,7 @@ private static unsafe void AddFields(Stack eval, HashSet conside continue; } - var stack = new Stack(); + var stack = state.Options.CreateStack(); stack.Push(field.GetValue(currentObject)); while (stack.Count > 0) { @@ -493,20 +500,21 @@ private static unsafe void AddFields(Stack eval, HashSet conside else if (value != null) { // Found a reference type field/member inside the value type. - if (!considered.Contains(value) && !eval.Contains(value)) + if (!state.Considered.Contains(value) && !eval.Contains(value)) { eval.Push(value); } } } } + state.Options.Return(stack); } else { var fieldValue = field.GetValue(currentObject); if (fieldValue != null) { - if (!considered.Contains(fieldValue)) + if (!state.Considered.Contains(fieldValue)) { eval.Push(fieldValue); } diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs index fcdc5fc..9e3b629 100644 --- a/src/ManagedObjectSize/ObjectSizeOptions.cs +++ b/src/ManagedObjectSize/ObjectSizeOptions.cs @@ -1,4 +1,5 @@  +using Microsoft.Extensions.ObjectPool; using System.Text; using System.Threading; @@ -16,7 +17,60 @@ public class ObjectSizeOptions private double? m_arraySampleConfidenceLevel; private int m_arraySampleConfidenceInterval = 5; private bool m_alwaysUseArraySampleAlgorithm; + private ObjectPoolProvider? m_poolProvider; + private ObjectPool>? m_stackPool; + private ObjectPool>? m_hashSetPool; + + public ObjectPoolProvider? 
PoolProvider + { + get => m_poolProvider; + set + { + CheckReadOnly(); + + if (value != null) + { + m_stackPool = value.Create(StackPolicy.Instance); + m_hashSetPool = value.Create(HashSetPolicy.Instance); + } + else + { + m_stackPool = null; + m_hashSetPool = null; + } + + m_poolProvider = value; + } + } + + private class StackPolicy : IPooledObjectPolicy> + { + public readonly static StackPolicy Instance = new(); + public Stack Create() => new(); + public bool Return(Stack obj) + { + obj.Clear(); + return true; + } + } + private class HashSetPolicy : IPooledObjectPolicy> + { + public readonly static HashSetPolicy Instance = new(); + public HashSet Create() => new(ReferenceEqualityComparer.Instance); + public bool Return(HashSet obj) + { + obj.Clear(); + return true; + } + } + + internal HashSet CreateHashSet() => m_hashSetPool?.Get() ?? HashSetPolicy.Instance.Create(); + internal Stack CreateStack() => m_stackPool?.Get() ?? StackPolicy.Instance.Create(); + + internal void Return(HashSet obj) => m_hashSetPool?.Return(obj); + internal void Return(Stack obj) => m_stackPool?.Return(obj); + public CancellationToken CancellationToken { get => m_cancellationToken; @@ -205,6 +259,12 @@ internal ObjectSizeOptions GetReadOnly() CancellationToken = m_cancellationToken, DebugWriter = m_debugWriter, CollectStatistics = m_collectStatistics, + // Copy these using backing fields, not PoolProvider property. + // We want to use the actual pool instances and not create new ones. + m_poolProvider = m_poolProvider, + m_stackPool = m_stackPool, + m_hashSetPool = m_hashSetPool, + IsReadOnly = true }; return result; From 6aef32f1aff441ecc0c39472fa0d01de75d78dfd Mon Sep 17 00:00:00 2001 From: cnkz Date: Mon, 8 Jan 2024 15:26:42 +0100 Subject: [PATCH 11/11] Restructuring, introduce ObjectPool abstraction. 
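Move the hard dependency on Microsoft.Extensions.ObjectPool out of the core library: ManagedObjectSize now only defines small pooling abstractions (Pooling.Pool<T>, Pooling.PoolProvider, Pooling.IPoolPolicy<T>, plus a NoopPoolProvider), while the new ManagedObjectSize.ObjectPool package adapts Microsoft.Extensions.ObjectPool to them through MicrosoftExtensionsObjectPoolPoolProvider and the UseMicrosoftExtensionsObjectPool() extension.

A minimal usage sketch (illustrative only; "graph" stands for whatever object graph is being measured):

    using ManagedObjectSize;
    using ManagedObjectSize.ObjectPool;

    // With pooling enabled, the internal Stack/HashSet work buffers are rented
    // from a pool and returned after each call instead of being allocated anew.
    var options = new ObjectSizeOptions().UseMicrosoftExtensionsObjectPool();
    long size = ObjectSize.GetObjectInclusiveSize(graph, options);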
--- LICENSE.txt | 2 +- .../ManagedObjectSize.Benchmarks.csproj | 3 +- .../ObjectPoolBenchmarks.cs | 7 +- src/ManagedObjectSize.Benchmarks/Program.cs | 10 +- .../ManagedObjectSize.ObjectPool.csproj | 40 ++ ...crosoftExtensionsObjectPoolPoolProvider.cs | 45 +++ .../ObjectPoolExtensions.cs | 28 ++ .../ManagedObjectSize.Tests.csproj | 1 + .../ObjectSizeTests.cs | 4 +- src/ManagedObjectSize.sln | 17 +- .../ManagedObjectSize.csproj | 6 +- src/ManagedObjectSize/ObjectSize.cs | 163 ++++---- src/ManagedObjectSize/ObjectSizeOptions.cs | 123 ++++-- src/ManagedObjectSize/Pooling/IPoolPolicy.cs | 8 + .../Pooling/NoopPoolProvider.cs | 15 + src/ManagedObjectSize/Pooling/Pool.cs | 8 + src/ManagedObjectSize/Pooling/PoolProvider.cs | 14 + src/ManagedObjectSize/Utils.cs | 370 +++++++++--------- src/SampleApp/Program.cs | 26 +- src/SampleApp/SampleApp.csproj | 21 +- 20 files changed, 579 insertions(+), 332 deletions(-) create mode 100644 src/ManagedObjectSize.ObjectPool/ManagedObjectSize.ObjectPool.csproj create mode 100644 src/ManagedObjectSize.ObjectPool/MicrosoftExtensionsObjectPoolPoolProvider.cs create mode 100644 src/ManagedObjectSize.ObjectPool/ObjectPoolExtensions.cs create mode 100644 src/ManagedObjectSize/Pooling/IPoolPolicy.cs create mode 100644 src/ManagedObjectSize/Pooling/NoopPoolProvider.cs create mode 100644 src/ManagedObjectSize/Pooling/Pool.cs create mode 100644 src/ManagedObjectSize/Pooling/PoolProvider.cs diff --git a/LICENSE.txt b/LICENSE.txt index 2e670d2..4838790 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 Christian Klutz +Copyright (c) 2024 Christian Klutz Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj b/src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj index c4323e3..aa50684 100644 --- a/src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj +++ b/src/ManagedObjectSize.Benchmarks/ManagedObjectSize.Benchmarks.csproj @@ -12,7 +12,8 @@ - + + diff --git a/src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs b/src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs index 81321e5..ea721df 100644 --- a/src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs +++ b/src/ManagedObjectSize.Benchmarks/ObjectPoolBenchmarks.cs @@ -1,5 +1,5 @@ using BenchmarkDotNet.Attributes; -using Microsoft.Extensions.ObjectPool; +using ManagedObjectSize.ObjectPool; namespace ManagedObjectSize.Benchmarks { @@ -15,10 +15,7 @@ public class ObjectPoolBenchmarks public void GlobalSetup() { m_graphData = GraphObject.CreateObjectGraph(N); - m_options = new ObjectSizeOptions - { - PoolProvider = new DefaultObjectPoolProvider() - }; + m_options = new ObjectSizeOptions().UseMicrosoftExtensionsObjectPool(); } [Benchmark] public long NoPool() => ObjectSize.GetObjectInclusiveSize(m_graphData); diff --git a/src/ManagedObjectSize.Benchmarks/Program.cs b/src/ManagedObjectSize.Benchmarks/Program.cs index b34af6a..29c0a13 100644 --- a/src/ManagedObjectSize.Benchmarks/Program.cs +++ b/src/ManagedObjectSize.Benchmarks/Program.cs @@ -1,9 +1,15 @@ -using BenchmarkDotNet.Running; +using BenchmarkDotNet.Configs; +using BenchmarkDotNet.Running; namespace ManagedObjectSize.Benchmarks { internal class Program { - public static void Main(string[] args) => BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args); + public static void Main(string[] args) + { + 
var xargs = new List(args); + var config = ManualConfig.Create(DefaultConfig.Instance); + BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(xargs.ToArray(), config); + } } } diff --git a/src/ManagedObjectSize.ObjectPool/ManagedObjectSize.ObjectPool.csproj b/src/ManagedObjectSize.ObjectPool/ManagedObjectSize.ObjectPool.csproj new file mode 100644 index 0000000..958ceb9 --- /dev/null +++ b/src/ManagedObjectSize.ObjectPool/ManagedObjectSize.ObjectPool.csproj @@ -0,0 +1,40 @@ + + + + net6.0 + enable + enable + true + + true + ManagedObjectSize.ObjectPool + 0.0.6 + Christian Klutz + + Adapts Microsoft.Extensions.ObjectPool to be used with ManagedObjectSize package. + + $(Version).0 + $(Version).0 + LICENSE.txt + https://github.com/cklutz/ManagedObjectSize + git + true + + + + + \ + true + + + + + + + + + + + + + diff --git a/src/ManagedObjectSize.ObjectPool/MicrosoftExtensionsObjectPoolPoolProvider.cs b/src/ManagedObjectSize.ObjectPool/MicrosoftExtensionsObjectPoolPoolProvider.cs new file mode 100644 index 0000000..ab0d369 --- /dev/null +++ b/src/ManagedObjectSize.ObjectPool/MicrosoftExtensionsObjectPoolPoolProvider.cs @@ -0,0 +1,45 @@ +using ManagedObjectSize.Pooling; +using Microsoft.Extensions.ObjectPool; + +namespace ManagedObjectSize.ObjectPool +{ + /// + /// Adapts a to be used + /// as . + /// + public class MicrosoftExtensionsObjectPoolPoolProvider : PoolProvider + { + private class PolicyAdapter : IPooledObjectPolicy where T : notnull + { + private readonly IPoolPolicy m_policy; + public PolicyAdapter(IPoolPolicy policy) => m_policy = policy; + public T Create() => m_policy.Create(); + public bool Return(T obj) => m_policy.Return(obj); + } + + private class PoolAdapter : Pool where T : class + { + private readonly ObjectPool m_pool; + public PoolAdapter(ObjectPool pool) => m_pool = pool; + public override T Get() => m_pool.Get(); + public override void Return(T obj) => m_pool.Return(obj); + } + + private readonly ObjectPoolProvider m_provider; + + public MicrosoftExtensionsObjectPoolPoolProvider() + : this(new DefaultObjectPoolProvider()) + { + } + + public MicrosoftExtensionsObjectPoolPoolProvider(ObjectPoolProvider objectPoolProvider) + { + m_provider = objectPoolProvider ?? throw new ArgumentNullException(nameof(objectPoolProvider)); + } + + public override Pool Create(IPoolPolicy policy) + { + return new PoolAdapter(m_provider.Create(new PolicyAdapter(policy))); + } + } +} diff --git a/src/ManagedObjectSize.ObjectPool/ObjectPoolExtensions.cs b/src/ManagedObjectSize.ObjectPool/ObjectPoolExtensions.cs new file mode 100644 index 0000000..f72d059 --- /dev/null +++ b/src/ManagedObjectSize.ObjectPool/ObjectPoolExtensions.cs @@ -0,0 +1,28 @@ +using Microsoft.Extensions.ObjectPool; + +namespace ManagedObjectSize.ObjectPool +{ + public static class ObjectPoolExtensions + { + /// + /// Configures to use an object pool based on . + /// + /// The options instance. + /// + /// The to be used. If null, an instance of the will be used. + /// The options instanced given as . + public static ObjectSizeOptions UseMicrosoftExtensionsObjectPool(this ObjectSizeOptions options, ObjectPoolProvider? 
provider = null) + { + if (provider == null) + { + options.PoolProvider = new MicrosoftExtensionsObjectPoolPoolProvider(); + } + else + { + options.PoolProvider = new MicrosoftExtensionsObjectPoolPoolProvider(provider); + } + + return options; + } + } +} diff --git a/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj b/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj index 5793873..7c90ea8 100644 --- a/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj +++ b/src/ManagedObjectSize.Tests/ManagedObjectSize.Tests.csproj @@ -19,6 +19,7 @@ + diff --git a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs index 73a7bbc..36bf158 100644 --- a/src/ManagedObjectSize.Tests/ObjectSizeTests.cs +++ b/src/ManagedObjectSize.Tests/ObjectSizeTests.cs @@ -2,6 +2,8 @@ using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Text; +using ManagedObjectSize.ObjectPool; +using ManagedObjectSize.Pooling; using Microsoft.Diagnostics.Runtime; using Microsoft.Extensions.ObjectPool; @@ -305,7 +307,7 @@ public unsafe void ObjectSize_ReportsCorrectSize(bool useRtHelpers, bool useObje //options.DebugOutput = true; if (useObjectPool) { - options.PoolProvider = new DefaultObjectPoolProvider(); + options.UseMicrosoftExtensionsObjectPool(); } // We require the addresses of the test objects to not change. We determine the address during GetSize() diff --git a/src/ManagedObjectSize.sln b/src/ManagedObjectSize.sln index d76db4a..069323c 100644 --- a/src/ManagedObjectSize.sln +++ b/src/ManagedObjectSize.sln @@ -9,7 +9,13 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ManagedObjectSize.Tests", " EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SampleApp", "SampleApp\SampleApp.csproj", "{1805F1BA-EB96-47F3-99F4-7B35F1613679}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ManagedObjectSize.Benchmarks", "ManagedObjectSize.Benchmarks\ManagedObjectSize.Benchmarks.csproj", "{F5535CC5-2FC0-4594-878D-DCEAF4D66C06}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ManagedObjectSize.Benchmarks", "ManagedObjectSize.Benchmarks\ManagedObjectSize.Benchmarks.csproj", "{F5535CC5-2FC0-4594-878D-DCEAF4D66C06}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ManagedObjectSize.ObjectPool", "ManagedObjectSize.ObjectPool\ManagedObjectSize.ObjectPool.csproj", "{F0F298B0-5C6A-49BE-A019-8703A42C70AC}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{C4914AFB-28DF-41AD-B56F-88D2B6A40476}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Samples", "Samples", "{7FEE09B8-7DBA-4499-93FA-0BBF85993D1C}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -33,10 +39,19 @@ Global {F5535CC5-2FC0-4594-878D-DCEAF4D66C06}.Debug|Any CPU.Build.0 = Debug|Any CPU {F5535CC5-2FC0-4594-878D-DCEAF4D66C06}.Release|Any CPU.ActiveCfg = Release|Any CPU {F5535CC5-2FC0-4594-878D-DCEAF4D66C06}.Release|Any CPU.Build.0 = Release|Any CPU + {F0F298B0-5C6A-49BE-A019-8703A42C70AC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F0F298B0-5C6A-49BE-A019-8703A42C70AC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F0F298B0-5C6A-49BE-A019-8703A42C70AC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F0F298B0-5C6A-49BE-A019-8703A42C70AC}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection + GlobalSection(NestedProjects) = preSolution + 
{F1B089A0-FF8D-4CD9-97FE-29826DAFEB91} = {C4914AFB-28DF-41AD-B56F-88D2B6A40476} + {1805F1BA-EB96-47F3-99F4-7B35F1613679} = {7FEE09B8-7DBA-4499-93FA-0BBF85993D1C} + {F5535CC5-2FC0-4594-878D-DCEAF4D66C06} = {C4914AFB-28DF-41AD-B56F-88D2B6A40476} + EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {EDB31B9A-E707-444C-952C-3B039CA17B63} EndGlobalSection diff --git a/src/ManagedObjectSize/ManagedObjectSize.csproj b/src/ManagedObjectSize/ManagedObjectSize.csproj index 1e274c5..b633bad 100644 --- a/src/ManagedObjectSize/ManagedObjectSize.csproj +++ b/src/ManagedObjectSize/ManagedObjectSize.csproj @@ -8,7 +8,7 @@ true ManagedObjectSize - 0.0.5 + 0.0.6 Christian Klutz Attempts to calculate the memory size of managed objects from within the application itself. @@ -35,8 +35,4 @@ - - - - diff --git a/src/ManagedObjectSize/ObjectSize.cs b/src/ManagedObjectSize/ObjectSize.cs index 59cb88b..b37738c 100644 --- a/src/ManagedObjectSize/ObjectSize.cs +++ b/src/ManagedObjectSize/ObjectSize.cs @@ -1,5 +1,4 @@ -using Microsoft.Extensions.ObjectPool; -using System.Diagnostics; +using System.Diagnostics; using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -82,31 +81,37 @@ public static unsafe long GetObjectInclusiveSize(object? obj, ObjectSizeOptions? var eval = options.CreateStack(); var state = new EvaluationState(options); + long totalSize; - eval.Push(obj); - - if (state.Statistics != null) + try { - state.Statistics.Start(); - state.Statistics.UpdateEval(eval); - } + eval.Push(obj); - long totalSize = ProcessEvaluationStack(eval, ref state, out count); + if (state.Statistics != null) + { + state.Statistics.Start(); + state.Statistics.UpdateEval(eval); + } - if (state.Statistics != null) - { - state.Statistics.Stop(); - state.Statistics.Dump(totalSize); - } + totalSize = ProcessEvaluationStack(eval, ref state, out count); - if (options.DebugOutput) + if (state.Statistics != null) + { + state.Statistics.Stop(); + state.Statistics.Dump(totalSize); + } + + if (options.DebugOutput) + { + state.Options.DebugWriter.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); + } + } + finally { - state.Options.DebugWriter.WriteLine($"total: {totalSize:N0} ({obj.GetType()})"); + options.Return(state.Considered); + options.Return(eval); } - options.Return(state.Considered); - options.Return(eval); - return totalSize; } @@ -287,58 +292,60 @@ private static unsafe void HandleArraySampled(Stack eval, ref Evaluatio state.Statistics?.UpdateSampled(); int elementCount = 0; - - // TODO: Should these be from a pool? Measure if cost is too high allocating if we have - // a "large" number of arrays to sample. var localEval = state.Options.CreateStack(); var localConsidered = state.Options.CreateHashSet(); - foreach (object element in (System.Collections.IEnumerable)obj) + try { - if (ShouldCountElement(element, elementType)) + foreach (object element in (System.Collections.IEnumerable)obj) { - // We're only counting the elements that are actually non-null. This might - // be less then the size of the array, when the array contains null elements. - // On the other hand, if we could every element, we also count excess elements. - // For example, the extra (unused) capacity of a List<>. - // Only considering non-null elements is still correct, however, because null - // elements don't contribute to the size. 
- elementCount++; - - if (elementCount <= sampleSize) + if (ShouldCountElement(element, elementType)) { - if (!localConsidered.Contains(element)) + // We're only counting the elements that are actually non-null. This might + // be less then the size of the array, when the array contains null elements. + // On the other hand, if we could every element, we also count excess elements. + // For example, the extra (unused) capacity of a List<>. + // Only considering non-null elements is still correct, however, because null + // elements don't contribute to the size. + elementCount++; + + if (elementCount <= sampleSize) { - HandleArrayElement(localEval, ref state, elementType, element); - localConsidered.Add(element); - - if (state.Statistics != null) + if (!localConsidered.Contains(element)) { - state.Statistics.UpdateSampleConsidered(localConsidered); - state.Statistics.UpdateSampleEval(localEval); + HandleArrayElement(localEval, ref state, elementType, element); + localConsidered.Add(element); + + if (state.Statistics != null) + { + state.Statistics.UpdateSampleConsidered(localConsidered); + state.Statistics.UpdateSampleEval(localEval); + } } } } } - } - if (localEval.Count > 0) - { - double sizeOfSamples = ProcessEvaluationStack(localEval, ref state, out _); - - var sample = new ArraySample + if (localEval.Count > 0) { - Size = (long)((sizeOfSamples / localConsidered.Count) * elementCount), - ElementCount = elementCount - }; + double sizeOfSamples = ProcessEvaluationStack(localEval, ref state, out _); - eval.Push(sample); + var sample = new ArraySample + { + Size = (long)((sizeOfSamples / localConsidered.Count) * elementCount), + ElementCount = elementCount + }; - state.Statistics?.UpdateEval(eval); - } + eval.Push(sample); - state.Options.Return(localEval); - state.Options.Return(localConsidered); + state.Statistics?.UpdateEval(eval); + } + } + finally + { + state.Options.Return(localEval); + state.Options.Return(localConsidered); + } } private static unsafe (int SampleSize, int? PopulationSize, bool Always) GetSampleAndPopulateSize(ref EvaluationState state, object obj, Type elementType) @@ -476,38 +483,44 @@ private static unsafe void AddFields(Stack eval, ref EvaluationState st } var stack = state.Options.CreateStack(); - stack.Push(field.GetValue(currentObject)); - while (stack.Count > 0) + try { - var currentValue = stack.Pop(); - if (currentValue == null) + stack.Push(field.GetValue(currentObject)); + while (stack.Count > 0) { - continue; - } + var currentValue = stack.Pop(); + if (currentValue == null) + { + continue; + } - var fields = GetFields(currentValue.GetType()); - foreach (var f in fields) - { - object? value = f.GetValue(currentValue); - if (f.FieldType.IsValueType) + var fields = GetFields(currentValue.GetType()); + foreach (var f in fields) { - // Check if field's type contains further reference type fields. - if (IsReferenceOrContainsReferences(f.FieldType)) + object? value = f.GetValue(currentValue); + if (f.FieldType.IsValueType) { - stack.Push(value); + // Check if field's type contains further reference type fields. + if (IsReferenceOrContainsReferences(f.FieldType)) + { + stack.Push(value); + } } - } - else if (value != null) - { - // Found a reference type field/member inside the value type. - if (!state.Considered.Contains(value) && !eval.Contains(value)) + else if (value != null) { - eval.Push(value); + // Found a reference type field/member inside the value type. 
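+ // Only push the value if it has not already been counted and is not already queued for evaluation.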
+ if (!state.Considered.Contains(value) && !eval.Contains(value)) + { + eval.Push(value); + } } } } } - state.Options.Return(stack); + finally + { + state.Options.Return(stack); + } } else { diff --git a/src/ManagedObjectSize/ObjectSizeOptions.cs b/src/ManagedObjectSize/ObjectSizeOptions.cs index 9e3b629..a7960e5 100644 --- a/src/ManagedObjectSize/ObjectSizeOptions.cs +++ b/src/ManagedObjectSize/ObjectSizeOptions.cs @@ -1,10 +1,12 @@ - -using Microsoft.Extensions.ObjectPool; -using System.Text; -using System.Threading; +using System.Text; namespace ManagedObjectSize -{ +{ + /// + /// Customizes behavior of calculating the size of a managed object. + /// + /// + /// public class ObjectSizeOptions { private bool m_debugOutput; @@ -17,60 +19,76 @@ public class ObjectSizeOptions private double? m_arraySampleConfidenceLevel; private int m_arraySampleConfidenceInterval = 5; private bool m_alwaysUseArraySampleAlgorithm; - private ObjectPoolProvider? m_poolProvider; - private ObjectPool>? m_stackPool; - private ObjectPool>? m_hashSetPool; - - public ObjectPoolProvider? PoolProvider + private Pooling.PoolProvider? m_poolProvider; + private Pooling.Pool>? m_stackPool; + private Pooling.Pool>? m_hashSetPool; + + /// + /// Gets or sets the pool provider to use. By default, pooling is not used. + /// + /// + /// A reference to the pool provider to use. If null, then object pooling will not be used. + /// If previously non-null, but then set to null, existing pools will be abandoned and are + /// eligable for garbage collection. + /// + public Pooling.PoolProvider? PoolProvider { get => m_poolProvider; set { CheckReadOnly(); - if (value != null) - { - m_stackPool = value.Create(StackPolicy.Instance); - m_hashSetPool = value.Create(HashSetPolicy.Instance); - } - else - { - m_stackPool = null; - m_hashSetPool = null; + m_poolProvider = value; + + if (m_poolProvider != null) + { + m_stackPool = m_poolProvider.Create(StackPolicy.Instance); + m_hashSetPool = m_poolProvider.Create(HashSetPolicy.Instance); } + else + { + // Custom pool implementations might be disposable. + // Typically (using Microsoft.Extensions.ObjectPool) they are not. + // Here we simply abondon the pools and the objects they may hold, + // letting the GC take care of them eventually. - m_poolProvider = value; + (m_stackPool as IDisposable)?.Dispose(); + m_stackPool = null; + + (m_hashSetPool as IDisposable)?.Dispose(); + m_hashSetPool = null; + } } } - private class StackPolicy : IPooledObjectPolicy> - { - public readonly static StackPolicy Instance = new(); - public Stack Create() => new(); - public bool Return(Stack obj) - { - obj.Clear(); - return true; - } + private class StackPolicy : Pooling.IPoolPolicy> + { + public readonly static StackPolicy Instance = new(); + public Stack Create() => new(); + public bool Return(Stack obj) + { + obj?.Clear(); + return obj is not null; + } } - private class HashSetPolicy : IPooledObjectPolicy> - { - public readonly static HashSetPolicy Instance = new(); - public HashSet Create() => new(ReferenceEqualityComparer.Instance); - public bool Return(HashSet obj) - { - obj.Clear(); - return true; - } + internal Stack CreateStack() => m_stackPool?.Get() ?? 
StackPolicy.Instance.Create(); + internal void Return(Stack obj) => m_stackPool?.Return(obj); + + private class HashSetPolicy : Pooling.IPoolPolicy> + { + public readonly static HashSetPolicy Instance = new(); + public HashSet Create() => new(ReferenceEqualityComparer.Instance); + public bool Return(HashSet obj) + { + obj?.Clear(); + return obj is not null; + } } internal HashSet CreateHashSet() => m_hashSetPool?.Get() ?? HashSetPolicy.Instance.Create(); - internal Stack CreateStack() => m_stackPool?.Get() ?? StackPolicy.Instance.Create(); - internal void Return(HashSet obj) => m_hashSetPool?.Return(obj); - internal void Return(Stack obj) => m_stackPool?.Return(obj); - + public CancellationToken CancellationToken { get => m_cancellationToken; @@ -117,6 +135,10 @@ public bool DebugOutput } } + /// + /// Gets or sets a value whether statistics should be collected. Statistics, if enabled, are written to + /// . + /// public bool CollectStatistics { get => m_collectStatistics; @@ -127,7 +149,12 @@ public bool CollectStatistics } } - public bool UseRtHelpers + /// + /// TEST USE ONLY Gets or sets a value that causes internals for CLR to be used to access + /// an object's interna (based on RuntimeHelpers). Use this only for testing and comparison + /// with the features of this library. + /// + internal bool UseRtHelpers { get => m_useRtHelpers; set @@ -259,6 +286,7 @@ internal ObjectSizeOptions GetReadOnly() CancellationToken = m_cancellationToken, DebugWriter = m_debugWriter, CollectStatistics = m_collectStatistics, + // Copy these using backing fields, not PoolProvider property. // We want to use the actual pool instances and not create new ones. m_poolProvider = m_poolProvider, @@ -346,6 +374,15 @@ public string GetEnabledString() sb.Append(nameof(CollectStatistics)).Append("=true"); } + if (PoolProvider != null) + { + if (sb.Length > 0) + { + sb.Append(' '); + } + + sb.Append(nameof(PoolProvider)).Append("=(present)"); + } if (sb.Length == 0) { diff --git a/src/ManagedObjectSize/Pooling/IPoolPolicy.cs b/src/ManagedObjectSize/Pooling/IPoolPolicy.cs new file mode 100644 index 0000000..2bebdfd --- /dev/null +++ b/src/ManagedObjectSize/Pooling/IPoolPolicy.cs @@ -0,0 +1,8 @@ +namespace ManagedObjectSize.Pooling +{ + public interface IPoolPolicy + { + public abstract T Create(); + public abstract bool Return(T value); + } +} diff --git a/src/ManagedObjectSize/Pooling/NoopPoolProvider.cs b/src/ManagedObjectSize/Pooling/NoopPoolProvider.cs new file mode 100644 index 0000000..8ee87a6 --- /dev/null +++ b/src/ManagedObjectSize/Pooling/NoopPoolProvider.cs @@ -0,0 +1,15 @@ +namespace ManagedObjectSize.Pooling +{ + public class NoopPoolProvider : PoolProvider + { + public override Pool Create(IPoolPolicy policy) => new NoopPool(policy); + + private class NoopPool : Pool where T : class + { + private readonly IPoolPolicy m_policy; + public NoopPool(IPoolPolicy policy) => m_policy = policy; + public override T Get() => m_policy.Create(); + public override void Return(T value) => m_policy.Return(value); + } + } +} diff --git a/src/ManagedObjectSize/Pooling/Pool.cs b/src/ManagedObjectSize/Pooling/Pool.cs new file mode 100644 index 0000000..15d07d2 --- /dev/null +++ b/src/ManagedObjectSize/Pooling/Pool.cs @@ -0,0 +1,8 @@ +namespace ManagedObjectSize.Pooling +{ + public abstract class Pool where T : class + { + public abstract T Get(); + public abstract void Return(T value); + } +} diff --git a/src/ManagedObjectSize/Pooling/PoolProvider.cs b/src/ManagedObjectSize/Pooling/PoolProvider.cs new file mode 100644 
index 0000000..6fa6ddd --- /dev/null +++ b/src/ManagedObjectSize/Pooling/PoolProvider.cs @@ -0,0 +1,14 @@ +namespace ManagedObjectSize.Pooling +{ + public abstract class PoolProvider + { + public virtual Pool Create() where T : class, new() => Create(new DefaultPoolPolicy()); + public abstract Pool Create(IPoolPolicy policy) where T : class; + } + + internal class DefaultPoolPolicy : IPoolPolicy where T: class, new() + { + public T Create() => new T(); + public bool Return(T value) => true; + } +} diff --git a/src/ManagedObjectSize/Utils.cs b/src/ManagedObjectSize/Utils.cs index 1d8af2b..f5173ff 100644 --- a/src/ManagedObjectSize/Utils.cs +++ b/src/ManagedObjectSize/Utils.cs @@ -1,9 +1,9 @@ -using System; -using System.Runtime.CompilerServices; - -namespace ManagedObjectSize -{ - public static class Utils +using System; +using System.Runtime.CompilerServices; + +namespace ManagedObjectSize +{ + public static class Utils { /// /// Returns a pointer to the object on the managed heap. The resulting address is not fixed, @@ -11,182 +11,182 @@ public static class Utils /// /// /// - [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static unsafe IntPtr GetVolatileHeapPointer(object @object) - { - var indirect = Unsafe.AsPointer(ref @object); - return **(IntPtr**)(&indirect); - } - - /// - /// Calculate required sample count for a given confidence level, interval and populate size. - /// - /// - /// - /// - /// - public static int CalculateSampleCount(double confidenceLevel, int confidenceInterval, int populationSize) - { - if (populationSize <= 0) - { - return 0; - } - - double Z = QNorm((1 - confidenceLevel) / 2, 0.0, 1.0, true, false); - double p = 0.5; - double c = (double)confidenceInterval / 100; - double ss = (Math.Pow(Z, 2) * p * (1 - p)) / Math.Pow(c, 2); - double finiteSS = ss / (1 + ((ss - 1) / populationSize)); - - return (int)Math.Round(finiteSS); - } - - /// - /// Quantile function (Inverse CDF) for the normal distribution. - /// - /// Probability. - /// Mean of normal distribution. - /// Standard deviation of normal distribution. - /// If true, probability is P[X <= x], otherwise P[X > x]. - /// If true, probabilities are given as log(p). - /// P[X <= x] where x ~ N(mu,sigma^2) - /// See https://svn.r-project.org/R/trunk/src/nmath/qnorm.c - /// See https://stackoverflow.com/a/1674554/21567 - private static double QNorm(double p, double mu, double sigma, bool lower_tail, bool log_p) - { - if (double.IsNaN(p) || double.IsNaN(mu) || double.IsNaN(sigma)) return (p + mu + sigma); - double ans; - bool isBoundaryCase = R_Q_P01_boundaries(p, double.NegativeInfinity, double.PositiveInfinity, lower_tail, log_p, out ans); - if (isBoundaryCase) return (ans); - if (sigma < 0) return (double.NaN); - if (sigma == 0) return (mu); - - double p_ = R_DT_qIv(p, lower_tail, log_p); - double q = p_ - 0.5; - double r, val; - - if (Math.Abs(q) <= 0.425) // 0.075 <= p <= 0.925 - { - r = .180625 - q * q; - val = q * (((((((r * 2509.0809287301226727 + - 33430.575583588128105) * r + 67265.770927008700853) * r + - 45921.953931549871457) * r + 13731.693765509461125) * r + - 1971.5909503065514427) * r + 133.14166789178437745) * r + - 3.387132872796366608) - / (((((((r * 5226.495278852854561 + - 28729.085735721942674) * r + 39307.89580009271061) * r + - 21213.794301586595867) * r + 5394.1960214247511077) * r + - 687.1870074920579083) * r + 42.313330701600911252) * r + 1.0); - } - else - { - r = q > 0 ? 
R_DT_CIv(p, lower_tail, log_p) : p_; - r = Math.Sqrt(-((log_p && ((lower_tail && q <= 0) || (!lower_tail && q > 0))) ? p : Math.Log(r))); - - if (r <= 5) // <==> min(p,1-p) >= exp(-25) ~= 1.3888e-11 - { - r -= 1.6; - val = (((((((r * 7.7454501427834140764e-4 + - .0227238449892691845833) * r + .24178072517745061177) * - r + 1.27045825245236838258) * r + - 3.64784832476320460504) * r + 5.7694972214606914055) * - r + 4.6303378461565452959) * r + - 1.42343711074968357734) - / (((((((r * - 1.05075007164441684324e-9 + 5.475938084995344946e-4) * - r + .0151986665636164571966) * r + - .14810397642748007459) * r + .68976733498510000455) * - r + 1.6763848301838038494) * r + - 2.05319162663775882187) * r + 1.0); - } - else // very close to 0 or 1 - { - r -= 5.0; - val = (((((((r * 2.01033439929228813265e-7 + - 2.71155556874348757815e-5) * r + - .0012426609473880784386) * r + .026532189526576123093) * - r + .29656057182850489123) * r + - 1.7848265399172913358) * r + 5.4637849111641143699) * - r + 6.6579046435011037772) - / (((((((r * - 2.04426310338993978564e-15 + 1.4215117583164458887e-7) * - r + 1.8463183175100546818e-5) * r + - 7.868691311456132591e-4) * r + .0148753612908506148525) - * r + .13692988092273580531) * r + - .59983220655588793769) * r + 1.0); - } - if (q < 0.0) val = -val; - } - - return (mu + sigma * val); - } - private static bool R_Q_P01_boundaries(double p, double _LEFT_, double _RIGHT_, bool lower_tail, bool log_p, out double ans) - { - if (log_p) - { - if (p > 0.0) - { - ans = double.NaN; - return (true); - } - if (p == 0.0) - { - ans = lower_tail ? _RIGHT_ : _LEFT_; - return (true); - } - if (p == double.NegativeInfinity) - { - ans = lower_tail ? _LEFT_ : _RIGHT_; - return (true); - } - } - else - { - if (p < 0.0 || p > 1.0) - { - ans = double.NaN; - return (true); - } - if (p == 0.0) - { - ans = lower_tail ? _LEFT_ : _RIGHT_; - return (true); - } - if (p == 1.0) - { - ans = lower_tail ? _RIGHT_ : _LEFT_; - return (true); - } - } - ans = double.NaN; - return (false); - } - - private static double R_DT_qIv(double p, bool lower_tail, bool log_p) - { - return (log_p ? (lower_tail ? Math.Exp(p) : -ExpM1(p)) : R_D_Lval(p, lower_tail)); - } - - private static double R_DT_CIv(double p, bool lower_tail, bool log_p) - { - return (log_p ? (lower_tail ? -ExpM1(p) : Math.Exp(p)) : R_D_Cval(p, lower_tail)); - } - - private static double R_D_Lval(double p, bool lower_tail) - { - return lower_tail ? p : 0.5 - p + 0.5; - } - - private static double R_D_Cval(double p, bool lower_tail) - { - return lower_tail ? 0.5 - p + 0.5 : p; - } - private static double ExpM1(double x) - { - if (Math.Abs(x) < 1e-5) - return x + 0.5 * x * x; - else - return Math.Exp(x) - 1.0; - } - } -} + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static unsafe IntPtr GetVolatileHeapPointer(object @object) + { + var indirect = Unsafe.AsPointer(ref @object); + return **(IntPtr**)(&indirect); + } + + /// + /// Calculate required sample count for a given confidence level, interval and populate size. 
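+ /// (Cochran's sample size formula with a finite population correction.)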
+ /// + /// + /// + /// + /// + public static int CalculateSampleCount(double confidenceLevel, int confidenceInterval, int populationSize) + { + if (populationSize <= 0) + { + return 0; + } + + double Z = QNorm((1 - confidenceLevel) / 2, 0.0, 1.0, true, false); + double p = 0.5; + double c = (double)confidenceInterval / 100; + double ss = (Math.Pow(Z, 2) * p * (1 - p)) / Math.Pow(c, 2); + double finiteSS = ss / (1 + ((ss - 1) / populationSize)); + + return (int)Math.Round(finiteSS); + } + + /// + /// Quantile function (Inverse CDF) for the normal distribution. + /// + /// Probability. + /// Mean of normal distribution. + /// Standard deviation of normal distribution. + /// If true, probability is P[X <= x], otherwise P[X > x]. + /// If true, probabilities are given as log(p). + /// P[X <= x] where x ~ N(mu,sigma^2) + /// See https://svn.r-project.org/R/trunk/src/nmath/qnorm.c + /// See https://stackoverflow.com/a/1674554/21567 + private static double QNorm(double p, double mu, double sigma, bool lower_tail, bool log_p) + { + if (double.IsNaN(p) || double.IsNaN(mu) || double.IsNaN(sigma)) return (p + mu + sigma); + double ans; + bool isBoundaryCase = R_Q_P01_boundaries(p, double.NegativeInfinity, double.PositiveInfinity, lower_tail, log_p, out ans); + if (isBoundaryCase) return (ans); + if (sigma < 0) return (double.NaN); + if (sigma == 0) return (mu); + + double p_ = R_DT_qIv(p, lower_tail, log_p); + double q = p_ - 0.5; + double r, val; + + if (Math.Abs(q) <= 0.425) // 0.075 <= p <= 0.925 + { + r = .180625 - q * q; + val = q * (((((((r * 2509.0809287301226727 + + 33430.575583588128105) * r + 67265.770927008700853) * r + + 45921.953931549871457) * r + 13731.693765509461125) * r + + 1971.5909503065514427) * r + 133.14166789178437745) * r + + 3.387132872796366608) + / (((((((r * 5226.495278852854561 + + 28729.085735721942674) * r + 39307.89580009271061) * r + + 21213.794301586595867) * r + 5394.1960214247511077) * r + + 687.1870074920579083) * r + 42.313330701600911252) * r + 1.0); + } + else + { + r = q > 0 ? R_DT_CIv(p, lower_tail, log_p) : p_; + r = Math.Sqrt(-((log_p && ((lower_tail && q <= 0) || (!lower_tail && q > 0))) ? 
p : Math.Log(r))); + + if (r <= 5) // <==> min(p,1-p) >= exp(-25) ~= 1.3888e-11 + { + r -= 1.6; + val = (((((((r * 7.7454501427834140764e-4 + + .0227238449892691845833) * r + .24178072517745061177) * + r + 1.27045825245236838258) * r + + 3.64784832476320460504) * r + 5.7694972214606914055) * + r + 4.6303378461565452959) * r + + 1.42343711074968357734) + / (((((((r * + 1.05075007164441684324e-9 + 5.475938084995344946e-4) * + r + .0151986665636164571966) * r + + .14810397642748007459) * r + .68976733498510000455) * + r + 1.6763848301838038494) * r + + 2.05319162663775882187) * r + 1.0); + } + else // very close to 0 or 1 + { + r -= 5.0; + val = (((((((r * 2.01033439929228813265e-7 + + 2.71155556874348757815e-5) * r + + .0012426609473880784386) * r + .026532189526576123093) * + r + .29656057182850489123) * r + + 1.7848265399172913358) * r + 5.4637849111641143699) * + r + 6.6579046435011037772) + / (((((((r * + 2.04426310338993978564e-15 + 1.4215117583164458887e-7) * + r + 1.8463183175100546818e-5) * r + + 7.868691311456132591e-4) * r + .0148753612908506148525) + * r + .13692988092273580531) * r + + .59983220655588793769) * r + 1.0); + } + if (q < 0.0) val = -val; + } + + return (mu + sigma * val); + } + private static bool R_Q_P01_boundaries(double p, double _LEFT_, double _RIGHT_, bool lower_tail, bool log_p, out double ans) + { + if (log_p) + { + if (p > 0.0) + { + ans = double.NaN; + return (true); + } + if (p == 0.0) + { + ans = lower_tail ? _RIGHT_ : _LEFT_; + return (true); + } + if (p == double.NegativeInfinity) + { + ans = lower_tail ? _LEFT_ : _RIGHT_; + return (true); + } + } + else + { + if (p < 0.0 || p > 1.0) + { + ans = double.NaN; + return (true); + } + if (p == 0.0) + { + ans = lower_tail ? _LEFT_ : _RIGHT_; + return (true); + } + if (p == 1.0) + { + ans = lower_tail ? _RIGHT_ : _LEFT_; + return (true); + } + } + ans = double.NaN; + return (false); + } + + private static double R_DT_qIv(double p, bool lower_tail, bool log_p) + { + return (log_p ? (lower_tail ? Math.Exp(p) : -ExpM1(p)) : R_D_Lval(p, lower_tail)); + } + + private static double R_DT_CIv(double p, bool lower_tail, bool log_p) + { + return (log_p ? (lower_tail ? -ExpM1(p) : Math.Exp(p)) : R_D_Cval(p, lower_tail)); + } + + private static double R_D_Lval(double p, bool lower_tail) + { + return lower_tail ? p : 0.5 - p + 0.5; + } + + private static double R_D_Cval(double p, bool lower_tail) + { + return lower_tail ? 
0.5 - p + 0.5 : p; + } + private static double ExpM1(double x) + { + if (Math.Abs(x) < 1e-5) + return x + 0.5 * x * x; + else + return Math.Exp(x) - 1.0; + } + } +} diff --git a/src/SampleApp/Program.cs b/src/SampleApp/Program.cs index 12dd84c..b79501a 100644 --- a/src/SampleApp/Program.cs +++ b/src/SampleApp/Program.cs @@ -1,4 +1,5 @@ using ManagedObjectSize; +using ManagedObjectSize.ObjectPool; using System.Diagnostics; namespace SampleApp @@ -13,17 +14,36 @@ static void Main(string[] args) Console.WriteLine("Object created: " + sw.Elapsed); Console.Out.Flush(); + + sw = Stopwatch.StartNew(); long size = ObjectSize.GetObjectInclusiveSize(graph); sw.Stop(); - Console.WriteLine("Full: " + size.ToString("N0") + " bytes : " + sw.Elapsed); + Console.WriteLine("Full: " + size.ToString("N0") + " bytes : " + sw.Elapsed); sw = Stopwatch.StartNew(); size = ObjectSize.GetObjectInclusiveSize(graph, new ObjectSizeOptions { - ArraySampleCount = 1000 + ArraySampleCount = 1000, + }); + sw.Stop(); + Console.WriteLine("Sample: " + size.ToString("N0") + " bytes : " + sw.Elapsed); + + sw = Stopwatch.StartNew(); + size = ObjectSize.GetObjectInclusiveSize(graph, new() + { + PoolProvider = new MicrosoftExtensionsObjectPoolPoolProvider() + }); + sw.Stop(); + Console.WriteLine("Full (pooled): " + size.ToString("N0") + " bytes : " + sw.Elapsed); + + sw = Stopwatch.StartNew(); + size = ObjectSize.GetObjectInclusiveSize(graph, new() + { + ArraySampleCount = 1000, + PoolProvider = new MicrosoftExtensionsObjectPoolPoolProvider() }); sw.Stop(); - Console.WriteLine("Sample: " + size.ToString("N0") + " bytes : " + sw.Elapsed); + Console.WriteLine("Sample (pooled): " + size.ToString("N0") + " bytes : " + sw.Elapsed); } #if false diff --git a/src/SampleApp/SampleApp.csproj b/src/SampleApp/SampleApp.csproj index 887766a..383bf4c 100644 --- a/src/SampleApp/SampleApp.csproj +++ b/src/SampleApp/SampleApp.csproj @@ -1,14 +1,15 @@ - + - - Exe - net6.0 - enable - enable - + + Exe + net6.0 + enable + enable + - - - + + + +
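
A note on the sampling math above: Utils.CalculateSampleCount implements Cochran's sample size formula (ss = Z^2 * p * (1 - p) / c^2, with p = 0.5) followed by a finite population correction. A rough sanity check of the values it produces (illustrative; numbers computed from the formula shown above and rounded):

    using ManagedObjectSize;

    // 95% confidence, +/-5 interval: Z ~= 1.96, so ss ~= 3.8416 * 0.25 / 0.0025 ~= 384.2
    // before the finite population correction is applied.
    Console.WriteLine(Utils.CalculateSampleCount(0.95, 5, 1_000));   // ~= 278
    Console.WriteLine(Utils.CalculateSampleCount(0.95, 5, 10_000));  // ~= 370

This suggests why confidence-based array sampling settles on at most a few hundred sampled elements even for very large arrays.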