diff --git a/.azure/pipelines/azure-pipelines-external-release.yml b/.azure/pipelines/azure-pipelines-external-release.yml index 218e0d98ed..a7de9c0fcd 100644 --- a/.azure/pipelines/azure-pipelines-external-release.yml +++ b/.azure/pipelines/azure-pipelines-external-release.yml @@ -1,9 +1,9 @@ ###################################### # NOTE: Before running this pipeline to generate a new nuget package, update the version string in two places # 1) update the name: string below (line 6) -- this is the version for the nuget package (e.g. 1.0.0) -# 2) update \libs\host\GarnetServer.cs readonly string version (~line 45) -- NOTE - these two values need to be the same +# 2) update \libs\host\GarnetServer.cs readonly string version (~line 53) -- NOTE - these two values need to be the same ###################################### -name: 1.0.19 +name: 1.0.30 trigger: branches: include: diff --git a/Directory.Packages.props b/Directory.Packages.props index c025628c1a..df66545fc2 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -25,5 +25,7 @@ + + \ No newline at end of file diff --git a/Garnet.nuspec b/Garnet.nuspec index 7d5d13bae5..199256eea4 100644 --- a/Garnet.nuspec +++ b/Garnet.nuspec @@ -37,6 +37,9 @@ + + + diff --git a/Garnet.sln b/Garnet.sln index d85da9228e..1a7f8dffd2 100644 --- a/Garnet.sln +++ b/Garnet.sln @@ -97,8 +97,17 @@ EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SampleModule", "playground\SampleModule\SampleModule.csproj", "{A8CA619E-8F13-4EF8-943F-2D5E3FEBFB3F}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "GarnetJSON", "playground\GarnetJSON\GarnetJSON.csproj", "{2C8F1F5D-31E5-4D00-A46E-F3B1D9BC098F}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MigrateBench", "playground\MigrateBench\MigrateBench.csproj", "{6B66B394-E410-4B61-9A5A-1595FF6F5E08}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "hosting", "hosting", "{01823EA4-4446-4D66-B268-DFEE55951964}" 
+EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Windows", "Windows", "{697766CD-2046-46D9-958A-0FD3B46C98D4}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Garnet.worker", "hosting\Windows\Garnet.worker\Garnet.worker.csproj", "{DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Garnet.resources", "libs\resources\Garnet.resources.csproj", "{A48412B4-FD60-467E-A5D9-F155CAB4F907}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -299,6 +308,22 @@ Global {6B66B394-E410-4B61-9A5A-1595FF6F5E08}.Release|Any CPU.Build.0 = Release|Any CPU {6B66B394-E410-4B61-9A5A-1595FF6F5E08}.Release|x64.ActiveCfg = Release|Any CPU {6B66B394-E410-4B61-9A5A-1595FF6F5E08}.Release|x64.Build.0 = Release|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Debug|x64.ActiveCfg = Debug|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Debug|x64.Build.0 = Debug|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Release|Any CPU.Build.0 = Release|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Release|x64.ActiveCfg = Release|Any CPU + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5}.Release|x64.Build.0 = Release|Any CPU + {A48412B4-FD60-467E-A5D9-F155CAB4F907}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A48412B4-FD60-467E-A5D9-F155CAB4F907}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A48412B4-FD60-467E-A5D9-F155CAB4F907}.Debug|x64.ActiveCfg = Debug|Any CPU + {A48412B4-FD60-467E-A5D9-F155CAB4F907}.Debug|x64.Build.0 = Debug|Any CPU + {A48412B4-FD60-467E-A5D9-F155CAB4F907}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A48412B4-FD60-467E-A5D9-F155CAB4F907}.Release|Any CPU.Build.0 = Release|Any CPU + 
{A48412B4-FD60-467E-A5D9-F155CAB4F907}.Release|x64.ActiveCfg = Release|Any CPU + {A48412B4-FD60-467E-A5D9-F155CAB4F907}.Release|x64.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -330,6 +355,9 @@ Global {A8CA619E-8F13-4EF8-943F-2D5E3FEBFB3F} = {69A71E2C-00E3-42F3-854E-BE157A24834E} {2C8F1F5D-31E5-4D00-A46E-F3B1D9BC098F} = {69A71E2C-00E3-42F3-854E-BE157A24834E} {6B66B394-E410-4B61-9A5A-1595FF6F5E08} = {69A71E2C-00E3-42F3-854E-BE157A24834E} + {697766CD-2046-46D9-958A-0FD3B46C98D4} = {01823EA4-4446-4D66-B268-DFEE55951964} + {DF2DD03E-87EE-482A-9FBA-6C8FBC23BDC5} = {697766CD-2046-46D9-958A-0FD3B46C98D4} + {A48412B4-FD60-467E-A5D9-F155CAB4F907} = {147FCE31-EC09-4C90-8E4D-37CA87ED18C3} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {2C02C405-4798-41CA-AF98-61EDFEF6772E} diff --git a/benchmark/BDN.benchmark/Resp/RespAofStress.cs b/benchmark/BDN.benchmark/Resp/RespAofStress.cs new file mode 100644 index 0000000000..7e52feec90 --- /dev/null +++ b/benchmark/BDN.benchmark/Resp/RespAofStress.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +using System.Runtime.CompilerServices; +using BenchmarkDotNet.Attributes; +using Embedded.perftest; +using Garnet.server; + +namespace BDN.benchmark.Resp +{ + [MemoryDiagnoser] + public unsafe class RespAofStress + { + EmbeddedRespServer server; + RespServerSession session; + const int batchSize = 128; + + static ReadOnlySpan SET => "*3\r\n$3\r\nSET\r\n$1\r\na\r\n$1\r\na\r\n"u8; + byte[] setRequestBuffer; + byte* setRequestBufferPointer; + + static ReadOnlySpan INCR => "*2\r\n$4\r\nINCR\r\n$1\r\ni\r\n"u8; + byte[] incrRequestBuffer; + byte* incrRequestBufferPointer; + + static ReadOnlySpan LPUSHPOP => "*3\r\n$5\r\nLPUSH\r\n$1\r\nd\r\n$1\r\ne\r\n*2\r\n$4\r\nLPOP\r\n$1\r\nd\r\n"u8; + byte[] lPushPopRequestBuffer; + byte* lPushPopRequestBufferPointer; + + [GlobalSetup] + public void GlobalSetup() + { + var opt = new GarnetServerOptions + { + QuietMode = true, + EnableAOF = true, + UseAofNullDevice = true, + MainMemoryReplication = true, + CommitFrequencyMs = -1, + AofPageSize = "128m", + AofMemorySize = "256m", + }; + server = new EmbeddedRespServer(opt); + + session = server.GetRespSession(); + + setRequestBuffer = GC.AllocateArray(SET.Length * batchSize, pinned: true); + setRequestBufferPointer = (byte*)Unsafe.AsPointer(ref setRequestBuffer[0]); + for (int i = 0; i < batchSize; i++) + SET.CopyTo(new Span(setRequestBuffer).Slice(i * SET.Length)); + + _ = session.TryConsumeMessages(setRequestBufferPointer, setRequestBuffer.Length); + + incrRequestBuffer = GC.AllocateArray(INCR.Length * batchSize, pinned: true); + incrRequestBufferPointer = (byte*)Unsafe.AsPointer(ref incrRequestBuffer[0]); + for (int i = 0; i < batchSize; i++) + INCR.CopyTo(new Span(incrRequestBuffer).Slice(i * INCR.Length)); + + _ = session.TryConsumeMessages(incrRequestBufferPointer, incrRequestBuffer.Length); + + lPushPopRequestBuffer = GC.AllocateArray(LPUSHPOP.Length * batchSize, pinned: true); + lPushPopRequestBufferPointer = (byte*)Unsafe.AsPointer(ref lPushPopRequestBuffer[0]); + for (int i 
= 0; i < batchSize; i++) + LPUSHPOP.CopyTo(new Span(lPushPopRequestBuffer).Slice(i * LPUSHPOP.Length)); + + // Pre-populate list with a single element to avoid repeatedly emptying it during the benchmark + SlowConsumeMessage("*3\r\n$5\r\nLPUSH\r\n$1\r\nd\r\n$1\r\nf\r\n"u8); + } + + [GlobalCleanup] + public void GlobalCleanup() + { + session.Dispose(); + server.Dispose(); + } + + [Benchmark] + public void Set() + { + _ = session.TryConsumeMessages(setRequestBufferPointer, setRequestBuffer.Length); + } + + [Benchmark] + public void Increment() + { + _ = session.TryConsumeMessages(incrRequestBufferPointer, incrRequestBuffer.Length); + } + + [Benchmark] + public void LPushPop() + { + _ = session.TryConsumeMessages(lPushPopRequestBufferPointer, lPushPopRequestBuffer.Length); + } + + private void SlowConsumeMessage(ReadOnlySpan message) + { + var buffer = GC.AllocateArray(message.Length, pinned: true); + var bufferPointer = (byte*)Unsafe.AsPointer(ref buffer[0]); + message.CopyTo(new Span(buffer)); + _ = session.TryConsumeMessages(bufferPointer, buffer.Length); + } + } +} \ No newline at end of file diff --git a/benchmark/BDN.benchmark/Resp/RespParseStress.cs b/benchmark/BDN.benchmark/Resp/RespParseStress.cs index 19ea1f41dd..a46bc8c3af 100644 --- a/benchmark/BDN.benchmark/Resp/RespParseStress.cs +++ b/benchmark/BDN.benchmark/Resp/RespParseStress.cs @@ -35,6 +35,10 @@ public unsafe class RespParseStress byte[] getRequestBuffer; byte* getRequestBufferPointer; + static ReadOnlySpan INCR => "*2\r\n$4\r\nINCR\r\n$1\r\ni\r\n"u8; + byte[] incrRequestBuffer; + byte* incrRequestBufferPointer; + static ReadOnlySpan ZADDREM => "*4\r\n$4\r\nZADD\r\n$1\r\nc\r\n$1\r\n1\r\n$1\r\nc\r\n*3\r\n$4\r\nZREM\r\n$1\r\nc\r\n$1\r\nc\r\n"u8; byte[] zAddRemRequestBuffer; byte* zAddRemRequestBufferPointer; @@ -92,6 +96,11 @@ public void GlobalSetup() for (int i = 0; i < batchSize; i++) GET.CopyTo(new Span(getRequestBuffer).Slice(i * GET.Length)); + incrRequestBuffer = GC.AllocateArray(INCR.Length * 
batchSize, pinned: true); + incrRequestBufferPointer = (byte*)Unsafe.AsPointer(ref incrRequestBuffer[0]); + for (int i = 0; i < batchSize; i++) + INCR.CopyTo(new Span(incrRequestBuffer).Slice(i * INCR.Length)); + zAddRemRequestBuffer = GC.AllocateArray(ZADDREM.Length * batchSize, pinned: true); zAddRemRequestBufferPointer = (byte*)Unsafe.AsPointer(ref zAddRemRequestBuffer[0]); for (int i = 0; i < batchSize; i++) @@ -116,7 +125,7 @@ public void GlobalSetup() SlowConsumeMessage("*4\r\n$4\r\nZADD\r\n$1\r\nc\r\n$1\r\n1\r\n$1\r\nd\r\n"u8); // Pre-populate list with a single element to avoid repeatedly emptying it during the benchmark - SlowConsumeMessage("*3\r\n$4\r\nLPUSH\r\n$1\r\nd\r\n$1\r\nf\r\n"u8); + SlowConsumeMessage("*3\r\n$5\r\nLPUSH\r\n$1\r\nd\r\n$1\r\nf\r\n"u8); // Pre-populate set with a single element to avoid repeatedly emptying it during the benchmark SlowConsumeMessage("*3\r\n$4\r\nSADD\r\n$1\r\ne\r\n$1\r\nb\r\n"u8); @@ -164,6 +173,12 @@ public void Get() _ = session.TryConsumeMessages(getRequestBufferPointer, getRequestBuffer.Length); } + [Benchmark] + public void Increment() + { + _ = session.TryConsumeMessages(incrRequestBufferPointer, incrRequestBuffer.Length); + } + [Benchmark] public void ZAddRem() { diff --git a/benchmark/Resp.benchmark/Program.cs b/benchmark/Resp.benchmark/Program.cs index 6406cadcda..cb3a151e8d 100644 --- a/benchmark/Resp.benchmark/Program.cs +++ b/benchmark/Resp.benchmark/Program.cs @@ -195,7 +195,7 @@ static void Main(string[] args) static void WaitForServer(Options opts) { - using var client = new GarnetClientSession(opts.Address, opts.Port, opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); + using var client = new GarnetClientSession(opts.Address, opts.Port, new(), tlsOptions: opts.EnableTLS ? 
BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); while (true) { try diff --git a/benchmark/Resp.benchmark/RespOnlineBench.cs b/benchmark/Resp.benchmark/RespOnlineBench.cs index 84a751fe40..6b89212f5a 100644 --- a/benchmark/Resp.benchmark/RespOnlineBench.cs +++ b/benchmark/Resp.benchmark/RespOnlineBench.cs @@ -157,7 +157,7 @@ private void InitializeClients() { gcsPool = new AsyncPool(opts.NumThreads.First(), () => { - var c = new GarnetClientSession(address, port, opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); + var c = new GarnetClientSession(address, port, new(), tlsOptions: opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); c.Connect(); if (auth != null) { @@ -573,8 +573,8 @@ public async void OpRunnerGarnetClientSession(int thread_id) client = new GarnetClientSession( address, port, - opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null, - bufferSize: Math.Max(bufferSizeValue, opts.ValueLength * opts.IntraThreadParallelism)); + new(Math.Max(bufferSizeValue, opts.ValueLength * opts.IntraThreadParallelism)), + tlsOptions: opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); client.Connect(); if (auth != null) { @@ -669,7 +669,11 @@ public async void OpRunnerGarnetClientSessionParallel(int thread_id, int paralle GarnetClientSession client = null; if (!opts.Pool) { - client = new GarnetClientSession(address, port, opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null, null, null, Math.Max(131072, opts.IntraThreadParallelism * opts.ValueLength)); + client = new GarnetClientSession( + address, + port, + new NetworkBufferSettings(Math.Max(131072, opts.IntraThreadParallelism * opts.ValueLength)), + tlsOptions: opts.EnableTLS ? 
BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); client.Connect(); if (auth != null) { diff --git a/benchmark/Resp.benchmark/RespPerfBench.cs b/benchmark/Resp.benchmark/RespPerfBench.cs index 1fc6898907..74c535104a 100644 --- a/benchmark/Resp.benchmark/RespPerfBench.cs +++ b/benchmark/Resp.benchmark/RespPerfBench.cs @@ -407,7 +407,7 @@ private void GarnetClientSessionOperateThreadRunner(int NumOps, OpType opType, R default: throw new Exception($"opType: {opType} benchmark not supported with GarnetClientSession!"); } - var c = new GarnetClientSession(opts.Address, opts.Port, opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); + var c = new GarnetClientSession(opts.Address, opts.Port, new(), tlsOptions: opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); c.Connect(); if (opts.Auth != null) { diff --git a/benchmark/Resp.benchmark/TxnPerfBench.cs b/benchmark/Resp.benchmark/TxnPerfBench.cs index 9670f8a5b7..89fa74e759 100644 --- a/benchmark/Resp.benchmark/TxnPerfBench.cs +++ b/benchmark/Resp.benchmark/TxnPerfBench.cs @@ -107,7 +107,7 @@ public void Run() { gcsPool = new AsyncPool(opts.NumThreads.First(), () => { - var c = new GarnetClientSession(address, port, opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); + var c = new GarnetClientSession(address, port, new(), tlsOptions: opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); c.Connect(); if (auth != null) { @@ -325,7 +325,7 @@ public void OpRunnerSERedis(int thread_id) public void LoadData() { var req = new OnlineReqGen(0, opts.DbSize, true, opts.Zipf, opts.KeyLength, opts.ValueLength); - GarnetClientSession client = new(address, port, opts.EnableTLS ? 
BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); + GarnetClientSession client = new(address, port, new(), tlsOptions: opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); client.Connect(); if (auth != null) { diff --git a/hosting/Windows/Garnet.worker/Garnet.worker.csproj b/hosting/Windows/Garnet.worker/Garnet.worker.csproj new file mode 100644 index 0000000000..4f639c7e12 --- /dev/null +++ b/hosting/Windows/Garnet.worker/Garnet.worker.csproj @@ -0,0 +1,16 @@ + + + + net8.0 + + + + + + + + + + + + diff --git a/hosting/Windows/Garnet.worker/Program.cs b/hosting/Windows/Garnet.worker/Program.cs new file mode 100644 index 0000000000..8418da8671 --- /dev/null +++ b/hosting/Windows/Garnet.worker/Program.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using Garnet; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +class Program +{ + static void Main(string[] args) + { + var builder = Host.CreateApplicationBuilder(args); + builder.Services.AddHostedService(_ => new Worker(args)); + + builder.Services.AddWindowsService(options => + { + options.ServiceName = "Microsoft Garnet Server"; + }); + + var host = builder.Build(); + host.Run(); + } +} \ No newline at end of file diff --git a/hosting/Windows/Garnet.worker/Worker.cs b/hosting/Windows/Garnet.worker/Worker.cs new file mode 100644 index 0000000000..d69adb7e3c --- /dev/null +++ b/hosting/Windows/Garnet.worker/Worker.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Hosting; + +namespace Garnet +{ + public class Worker : BackgroundService + { + private bool _isDisposed = false; + private readonly string[] args; + + private GarnetServer server; + + public Worker(string[] args) + { + this.args = args; + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + try + { + server = new GarnetServer(args); + + // Start the server + server.Start(); + + await Task.Delay(Timeout.Infinite, stoppingToken).ConfigureAwait(false); + } + catch (Exception ex) + { + Console.WriteLine($"Unable to initialize server due to exception: {ex.Message}"); + } + } + + /// + /// Triggered when the application host is performing a graceful shutdown. + /// + /// Indicates that the shutdown process should no longer be graceful. + public override async Task StopAsync(CancellationToken cancellationToken) + { + Dispose(); + await base.StopAsync(cancellationToken).ConfigureAwait(false); + } + + public override void Dispose() + { + if (_isDisposed) + { + return; + } + server?.Dispose(); + _isDisposed = true; + } + } +} \ No newline at end of file diff --git a/libs/client/ClientSession/GarnetClientSession.cs b/libs/client/ClientSession/GarnetClientSession.cs index 70622fe4e6..92aa7951fc 100644 --- a/libs/client/ClientSession/GarnetClientSession.cs +++ b/libs/client/ClientSession/GarnetClientSession.cs @@ -23,7 +23,6 @@ public sealed unsafe partial class GarnetClientSession : IServerHook, IMessageCo { readonly string address; readonly int port; - readonly int bufferSize; readonly int bufferSizeDigits; INetworkSender networkSender; readonly ElasticCircularBuffer tasksTypes = new(); @@ -61,8 +60,6 @@ public sealed unsafe partial class GarnetClientSession : IServerHook, IMessageCo /// public bool IsConnected => socket != null && socket.Connected && !Disposed; - readonly LimitedFixedBufferPool networkPool; - /// /// Username to 
authenticate the session on the server. /// @@ -73,6 +70,21 @@ public sealed unsafe partial class GarnetClientSession : IServerHook, IMessageCo /// readonly string authPassword = null; + /// + /// Indicating whether this instance is using its own network pool or one that was provided + /// + readonly bool usingManagedNetworkPool = false; + + /// + /// Instance of network buffer settings describing the send and receive buffer sizes + /// + readonly NetworkBufferSettings networkBufferSettings; + + /// + /// NetworkPool used to allocate send and receive buffers + /// + readonly LimitedFixedBufferPool networkPool; + /// /// Create client instance /// @@ -81,16 +93,29 @@ public sealed unsafe partial class GarnetClientSession : IServerHook, IMessageCo /// TLS options /// Username to authenticate with /// Password to authenticate with - /// Network buffer size + /// Settings for send and receive network buffers + /// Buffer pool to use for allocating send and receive buffers /// Max outstanding network sends allowed /// Logger - public GarnetClientSession(string address, int port, SslClientAuthenticationOptions tlsOptions = null, string authUsername = null, string authPassword = null, int bufferSize = 1 << 17, int networkSendThrottleMax = 8, ILogger logger = null) + public GarnetClientSession( + string address, + int port, + NetworkBufferSettings networkBufferSettings, + LimitedFixedBufferPool networkPool = null, + SslClientAuthenticationOptions tlsOptions = null, + string authUsername = null, + string authPassword = null, + int networkSendThrottleMax = 8, + ILogger logger = null) { - this.networkPool = new LimitedFixedBufferPool(bufferSize, logger: logger); this.address = address; this.port = port; - this.bufferSize = bufferSize; - this.bufferSizeDigits = NumUtils.NumDigits(bufferSize); + + this.usingManagedNetworkPool = networkPool != null; + this.networkBufferSettings = networkBufferSettings; + this.networkPool = networkPool ?? 
networkBufferSettings.CreateBufferPool(); + this.bufferSizeDigits = NumUtils.NumDigits(this.networkBufferSettings.sendBufferSize); + this.logger = logger; this.sslOptions = tlsOptions; this.networkSendThrottleMax = networkSendThrottleMax; @@ -107,7 +132,15 @@ public GarnetClientSession(string address, int port, SslClientAuthenticationOpti public void Connect(int timeoutMs = 0, CancellationToken token = default) { socket = GetSendSocket(address, port, timeoutMs); - networkHandler = new GarnetClientSessionTcpNetworkHandler(this, socket, networkPool, sslOptions != null, this, networkSendThrottleMax, logger); + networkHandler = new GarnetClientSessionTcpNetworkHandler( + this, + socket, + networkBufferSettings, + networkPool, + sslOptions != null, + messageConsumer: this, + networkSendThrottleMax: networkSendThrottleMax, + logger: logger); networkHandler.StartAsync(sslOptions, $"{address}:{port}", token).ConfigureAwait(false).GetAwaiter().GetResult(); networkSender = networkHandler.GetNetworkSender(); networkSender.GetResponseObject(); @@ -159,7 +192,7 @@ public void Dispose() networkSender?.ReturnResponseObject(); socket?.Dispose(); networkHandler?.Dispose(); - networkPool.Dispose(); + if (!usingManagedNetworkPool) networkPool.Dispose(); } /// @@ -259,8 +292,8 @@ public void ExecuteClusterAppendLog(string nodeId, long previousAddress, long cu } offset = curr; - if (payloadLength > bufferSize) - throw new Exception($"Payload length {payloadLength} is larger than bufferSize {bufferSize} bytes"); + if (payloadLength > networkBufferSettings.sendBufferSize) + throw new Exception($"Payload length {payloadLength} is larger than bufferSize {networkBufferSettings.sendBufferSize} bytes"); while (!RespWriteUtils.WriteBulkString(new Span((void*)payloadPtr, payloadLength), ref curr, end)) { diff --git a/libs/client/ClientSession/GarnetClientSessionTcpNetworkHandler.cs b/libs/client/ClientSession/GarnetClientSessionTcpNetworkHandler.cs index 2398223023..4351eb683d 100644 --- 
a/libs/client/ClientSession/GarnetClientSessionTcpNetworkHandler.cs +++ b/libs/client/ClientSession/GarnetClientSessionTcpNetworkHandler.cs @@ -10,8 +10,8 @@ namespace Garnet.client { sealed class GarnetClientSessionTcpNetworkHandler : TcpNetworkHandlerBase { - public GarnetClientSessionTcpNetworkHandler(GarnetClientSession serverHook, Socket socket, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer, int networkSendThrottleMax = 8, ILogger logger = null) - : base(serverHook, new GarnetTcpNetworkSender(socket, networkPool, networkSendThrottleMax), socket, networkPool, useTLS, messageConsumer, logger) + public GarnetClientSessionTcpNetworkHandler(GarnetClientSession serverHook, Socket socket, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer, int networkSendThrottleMax = 8, ILogger logger = null) + : base(serverHook, new GarnetTcpNetworkSender(socket, networkBufferSettings, networkPool, networkSendThrottleMax), socket, networkBufferSettings, networkPool, useTLS, messageConsumer: messageConsumer, logger: logger) { } diff --git a/libs/client/ClientTcpNetworkSender.cs b/libs/client/ClientTcpNetworkSender.cs index c15d300b17..a8061a8cb0 100644 --- a/libs/client/ClientTcpNetworkSender.cs +++ b/libs/client/ClientTcpNetworkSender.cs @@ -22,10 +22,11 @@ public class ClientTcpNetworkSender : GarnetTcpNetworkSender /// /// /// + /// /// /// - public ClientTcpNetworkSender(Socket socket, Action callback, LimitedFixedBufferPool networkPool, int networkSendThrottleMax) - : base(socket, networkPool, networkSendThrottleMax) + public ClientTcpNetworkSender(Socket socket, Action callback, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, int networkSendThrottleMax) + : base(socket, networkBufferSettings, networkPool, networkSendThrottleMax) { this.callback = callback; this.reusableSaea = new SimpleObjectPool(() => diff --git 
a/libs/client/GarnetClient.cs b/libs/client/GarnetClient.cs index ddbe5d447a..2edabf7627 100644 --- a/libs/client/GarnetClient.cs +++ b/libs/client/GarnetClient.cs @@ -5,6 +5,7 @@ using System.Buffers; using System.Collections.Generic; using System.Diagnostics; +using System.Net; using System.Net.Security; using System.Net.Sockets; using System.Runtime.CompilerServices; @@ -46,6 +47,7 @@ public sealed partial class GarnetClient : IServerHook, IMessageConsumer, IDispo readonly string address; readonly int port; readonly int sendPageSize; + readonly int bufferSize; readonly int maxOutstandingTasks; NetworkWriter networkWriter; INetworkSender networkSender; @@ -132,6 +134,7 @@ public GarnetClient( string authUsername = null, string authPassword = null, int sendPageSize = 1 << 21, + int bufferSize = 1 << 17, int maxOutstandingTasks = 1 << 19, int timeoutMilliseconds = 0, MemoryPool memoryPool = null, @@ -143,6 +146,7 @@ public GarnetClient( this.address = address; this.port = port; this.sendPageSize = (int)Utility.PreviousPowerOf2(sendPageSize); + this.bufferSize = bufferSize; this.authUsername = authUsername; this.authPassword = authPassword; @@ -185,7 +189,7 @@ public GarnetClient( public void Connect(CancellationToken token = default) { socket = GetSendSocket(timeoutMilliseconds); - networkWriter = new NetworkWriter(this, socket, 1 << 17, sslOptions, out networkHandler, sendPageSize, networkSendThrottleMax, logger); + networkWriter = new NetworkWriter(this, socket, bufferSize, sslOptions, out networkHandler, sendPageSize, networkSendThrottleMax, logger); networkHandler.StartAsync(sslOptions, $"{address}:{port}", token).ConfigureAwait(false).GetAwaiter().GetResult(); networkSender = networkHandler.GetNetworkSender(); @@ -218,7 +222,7 @@ public void Connect(CancellationToken token = default) public async Task ConnectAsync(CancellationToken token = default) { socket = GetSendSocket(timeoutMilliseconds); - networkWriter = new NetworkWriter(this, socket, 1 << 17, 
sslOptions, out networkHandler, sendPageSize, networkSendThrottleMax, logger); + networkWriter = new NetworkWriter(this, socket, bufferSize, sslOptions, out networkHandler, sendPageSize, networkSendThrottleMax, logger); await networkHandler.StartAsync(sslOptions, $"{address}:{port}", token).ConfigureAwait(false); networkSender = networkHandler.GetNetworkSender(); @@ -247,7 +251,8 @@ public async Task ConnectAsync(CancellationToken token = default) Socket GetSendSocket(int millisecondsTimeout = 0) { - _ = Format.TryParseEndPoint($"{address}:{port}", out var endPoint); + var ip = IPAddress.Parse(address); + var endPoint = new IPEndPoint(ip, port); var socket = new Socket(endPoint.AddressFamily, SocketType.Stream, ProtocolType.Tcp) { @@ -256,7 +261,7 @@ Socket GetSendSocket(int millisecondsTimeout = 0) if (millisecondsTimeout > 0) { - IAsyncResult result = socket.BeginConnect(endPoint, null, null); + var result = socket.BeginConnect(endPoint, null, null); result.AsyncWaitHandle.WaitOne(millisecondsTimeout, true); if (socket.Connected) diff --git a/libs/client/GarnetClientTcpNetworkHandler.cs b/libs/client/GarnetClientTcpNetworkHandler.cs index 25a64f01f0..4aad6a9df6 100644 --- a/libs/client/GarnetClientTcpNetworkHandler.cs +++ b/libs/client/GarnetClientTcpNetworkHandler.cs @@ -11,8 +11,8 @@ namespace Garnet.client { sealed class GarnetClientTcpNetworkHandler : TcpNetworkHandlerBase { - public GarnetClientTcpNetworkHandler(GarnetClient serverHook, Action callback, Socket socket, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer, int networkSendThrottleMax = 8, ILogger logger = null) - : base(serverHook, new ClientTcpNetworkSender(socket, callback, networkPool, networkSendThrottleMax), socket, networkPool, useTLS, messageConsumer, logger) + public GarnetClientTcpNetworkHandler(GarnetClient serverHook, Action callback, Socket socket, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, bool useTLS, 
IMessageConsumer messageConsumer, int networkSendThrottleMax = 8, ILogger logger = null) + : base(serverHook, new ClientTcpNetworkSender(socket, callback, networkBufferSettings, networkPool, networkSendThrottleMax), socket, networkBufferSettings, networkPool, useTLS, messageConsumer: messageConsumer, logger: logger) { } diff --git a/libs/client/NetworkWriter.cs b/libs/client/NetworkWriter.cs index 2faaf953e7..e9654e9d98 100644 --- a/libs/client/NetworkWriter.cs +++ b/libs/client/NetworkWriter.cs @@ -72,6 +72,7 @@ internal sealed class NetworkWriter : IDisposable /// CompletionEvent FlushEvent; + readonly NetworkBufferSettings networkBufferSettings; readonly LimitedFixedBufferPool networkPool; readonly GarnetClientTcpNetworkHandler networkHandler; @@ -80,10 +81,11 @@ internal sealed class NetworkWriter : IDisposable /// public NetworkWriter(GarnetClient serverHook, Socket socket, int messageBufferSize, SslClientAuthenticationOptions sslOptions, out GarnetClientTcpNetworkHandler networkHandler, int sendPageSize, int networkSendThrottleMax, ILogger logger = null) { - this.networkPool = new LimitedFixedBufferPool(messageBufferSize, logger: logger); + this.networkBufferSettings = new NetworkBufferSettings(messageBufferSize, messageBufferSize); + this.networkPool = networkBufferSettings.CreateBufferPool(logger: logger); if (BufferSize > PageOffset.kPageMask) throw new Exception(); - this.networkHandler = networkHandler = new GarnetClientTcpNetworkHandler(serverHook, AsyncFlushPageCallback, socket, networkPool, sslOptions != null, serverHook, networkSendThrottleMax, logger); + this.networkHandler = networkHandler = new GarnetClientTcpNetworkHandler(serverHook, AsyncFlushPageCallback, socket, networkBufferSettings, networkPool, sslOptions != null, serverHook, networkSendThrottleMax: networkSendThrottleMax, logger: logger); networkSender = networkHandler.GetNetworkSender(); FlushEvent.Initialize(); @@ -109,7 +111,7 @@ public void Dispose() FlushEvent.Dispose(); 
epoch.Dispose(); networkHandler.Dispose(); - networkPool.Dispose(); + networkPool?.Dispose(); } /// diff --git a/libs/cluster/CmdStrings.cs b/libs/cluster/CmdStrings.cs index f6ee4347f9..9b8d3a765d 100644 --- a/libs/cluster/CmdStrings.cs +++ b/libs/cluster/CmdStrings.cs @@ -52,6 +52,7 @@ static class CmdStrings public static ReadOnlySpan RESP_ERR_INVALID_SLOT => "ERR Invalid or out of range slot"u8; public static ReadOnlySpan RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER => "ERR value is not an integer or out of range."u8; public static ReadOnlySpan RESP_ERR_GENERIC_VALUE_IS_NOT_BOOLEAN => "ERR value is not a boolean."u8; + public static ReadOnlySpan RESP_ERR_RESET_WITH_KEYS_ASSIGNED => "-ERR CLUSTER RESET can't be called with master nodes containing keys\r\n"u8; public static ReadOnlySpan RESP_SYNTAX_ERROR => "ERR syntax error"u8; /// diff --git a/libs/cluster/Server/ClusterConfig.cs b/libs/cluster/Server/ClusterConfig.cs index 4ce43ee449..56c418113d 100644 --- a/libs/cluster/Server/ClusterConfig.cs +++ b/libs/cluster/Server/ClusterConfig.cs @@ -319,7 +319,7 @@ public List GetRemoteNodeIds() /// /// /// Integer representing offset of worker in worker list. - public int GetWorkerIdFromNodeId(string nodeId) + public ushort GetWorkerIdFromNodeId(string nodeId) { for (ushort i = 1; i <= NumWorkers; i++) { @@ -733,7 +733,11 @@ private string GetSlotRange(ushort workerId) return result; } - private List GetSlotList(ushort workerId) + /// + /// Retrieve a list of slots served by this node. + /// + /// List of slots. 
+ public List GetSlotList(ushort workerId) { List result = []; for (var i = 0; i < MAX_HASH_SLOT_VALUE; i++) diff --git a/libs/cluster/Server/ClusterManager.cs b/libs/cluster/Server/ClusterManager.cs index 619c54d326..742fc47bd5 100644 --- a/libs/cluster/Server/ClusterManager.cs +++ b/libs/cluster/Server/ClusterManager.cs @@ -4,7 +4,6 @@ using System; using System.Threading; using Garnet.common; -using Garnet.server; using Garnet.server.TLS; using Microsoft.Extensions.Logging; using Tsavorite.core; @@ -50,7 +49,7 @@ public unsafe ClusterManager(ClusterProvider clusterProvider, ILogger logger = n clusterConfigDevice = deviceFactory.Get(new FileDescriptor(directoryName: "", fileName: "nodes.conf")); pool = new(1, (int)clusterConfigDevice.SectorSize); - var address = opts.Address ?? StoreWrapper.GetIp(); + var address = clusterProvider.storeWrapper.GetIp(); this.logger = logger; var recoverConfig = clusterConfigDevice.GetFileSize(0) > 0 && !opts.CleanClusterConfig; diff --git a/libs/cluster/Server/ClusterManagerWorkerState.cs b/libs/cluster/Server/ClusterManagerWorkerState.cs index eb6eea67cd..309d102c44 100644 --- a/libs/cluster/Server/ClusterManagerWorkerState.cs +++ b/libs/cluster/Server/ClusterManagerWorkerState.cs @@ -100,19 +100,23 @@ public ReadOnlySpan TryReset(bool soft, int expirySeconds = 60) { PauseConfigMerge(); var resp = CmdStrings.RESP_OK; - while (true) { var current = currentConfig; + var localSlots = current.GetSlotList(1); + if (clusterProvider.storeWrapper.HasKeysInSlots(localSlots)) + { + return CmdStrings.RESP_ERR_RESET_WITH_KEYS_ASSIGNED; + } + + this.clusterConnectionStore.CloseAll(); + var newNodeId = soft ? current.LocalNodeId : Generator.CreateHexId(); var address = current.LocalNodeIp; var port = current.LocalNodePort; var configEpoch = soft ? 
current.LocalNodeConfigEpoch : 0; var expiry = DateTimeOffset.UtcNow.Ticks + TimeSpan.FromSeconds(expirySeconds).Ticks; - foreach (var nodeId in current.GetRemoteNodeIds()) - _ = workerBanList.AddOrUpdate(nodeId, expiry, (key, oldValue) => expiry); - var newConfig = new ClusterConfig().InitializeLocalWorker( newNodeId, address, diff --git a/libs/cluster/Server/ClusterProvider.cs b/libs/cluster/Server/ClusterProvider.cs index 2f8218c1a8..9b8ac12a22 100644 --- a/libs/cluster/Server/ClusterProvider.cs +++ b/libs/cluster/Server/ClusterProvider.cs @@ -119,6 +119,18 @@ public void Dispose() public bool IsReplica() => clusterManager?.CurrentConfig.LocalNodeRole == NodeRole.REPLICA || replicationManager?.Recovering == true; + /// + public bool IsReplica(string nodeId) + { + var config = clusterManager?.CurrentConfig; + if (config is null) + { + return false; + } + + return config.GetNodeRoleFromNodeId(nodeId) == NodeRole.REPLICA; + } + /// public void ResetGossipStats() => clusterManager?.gossipStats.Reset(); @@ -166,7 +178,7 @@ public void SafeTruncateAOF(StoreType storeType, bool full, long CheckpointCover else { if (serverOptions.MainMemoryReplication) - storeWrapper.appendOnlyFile?.UnsafeShiftBeginAddress(CheckpointCoveredAofAddress, truncateLog: true, noFlush: true); + storeWrapper.appendOnlyFile?.UnsafeShiftBeginAddress(CheckpointCoveredAofAddress, truncateLog: true); else { storeWrapper.appendOnlyFile?.TruncateUntil(CheckpointCoveredAofAddress); @@ -255,10 +267,24 @@ public MetricsItem[] GetGossipStats(bool metricsDisabled) new("gossip_full_send", metricsDisabled ? "0" : gossipStats.gossip_full_send.ToString()), new("gossip_empty_send", metricsDisabled ? "0" : gossipStats.gossip_empty_send.ToString()), new("gossip_bytes_send", metricsDisabled ? "0" : gossipStats.gossip_bytes_send.ToString()), - new("gossip_bytes_recv", metricsDisabled ? "0" : gossipStats.gossip_bytes_recv.ToString()) + new("gossip_bytes_recv", metricsDisabled ? 
"0" : gossipStats.gossip_bytes_recv.ToString()), + new("gossip_open_connections", metricsDisabled ? "0" : this.clusterManager.clusterConnectionStore.Count.ToString()) ]; } + public MetricsItem[] GetBufferPoolStats() + => [new("migration_manager", migrationManager.GetBufferPoolStats()), new("replication_manager", replicationManager.GetBufferPoolStats())]; + + public void PurgeBufferPool(ManagerType managerType) + { + if (managerType == ManagerType.MigrationManager) + migrationManager.Purge(); + else if (managerType == ManagerType.ReplicationManager) + replicationManager.Purge(); + else + throw new GarnetException(); + } + internal ReplicationLogCheckpointManager GetReplicationLogCheckpointManager(StoreType storeType) { Debug.Assert(serverOptions.EnableCluster); @@ -281,7 +307,7 @@ internal ReplicationLogCheckpointManager GetReplicationLogCheckpointManager(Stor /// internal bool BumpAndWaitForEpochTransition() { - var server = storeWrapper.GetServer(); + var server = storeWrapper.GetTcpServer(); BumpCurrentEpoch(); while (true) { diff --git a/libs/cluster/Server/ClusterUtils.cs b/libs/cluster/Server/ClusterUtils.cs index b780fcb52f..1511bec80b 100644 --- a/libs/cluster/Server/ClusterUtils.cs +++ b/libs/cluster/Server/ClusterUtils.cs @@ -95,8 +95,8 @@ public static void IOCallback(this ILogger logger, uint errorCode, uint numBytes { if (errorCode != 0) { - string errorMessage = new Win32Exception((int)errorCode).Message; - logger.LogError("OverlappedStream GetQueuedCompletionStatus error: {errorCode} msg: {errorMessage}", errorCode, errorMessage); + var errorMessage = new Win32Exception((int)errorCode).Message; + logger.LogError("[ClusterUtils] OverlappedStream GetQueuedCompletionStatus error: {errorCode} msg: {errorMessage}", errorCode, errorMessage); } ((SemaphoreSlim)context).Release(); } diff --git a/libs/cluster/Server/Failover/ReplicaFailoverSession.cs b/libs/cluster/Server/Failover/ReplicaFailoverSession.cs index 96dffbceb1..b96687fac2 100644 --- 
a/libs/cluster/Server/Failover/ReplicaFailoverSession.cs +++ b/libs/cluster/Server/Failover/ReplicaFailoverSession.cs @@ -25,6 +25,11 @@ internal sealed partial class FailoverSession : IDisposable /// bool useGossipConnections = false; + /// + /// Send page size for GarnetClient + /// + const int sendPageSize = 1 << 17; + /// /// Helper method to re-use gossip connection to perform the failover /// @@ -77,6 +82,8 @@ private GarnetClient CreateConnection(string nodeId) address, port, clusterProvider.serverOptions.TlsOptions?.TlsClientOptions, + sendPageSize: sendPageSize, + maxOutstandingTasks: 8, authUsername: clusterProvider.ClusterUsername, authPassword: clusterProvider.ClusterPassword, logger: logger); diff --git a/libs/cluster/Server/GarnetClusterConnectionStore.cs b/libs/cluster/Server/GarnetClusterConnectionStore.cs index 4a99cf0bcd..a78583a3be 100644 --- a/libs/cluster/Server/GarnetClusterConnectionStore.cs +++ b/libs/cluster/Server/GarnetClusterConnectionStore.cs @@ -92,6 +92,29 @@ public bool AddConnection(GarnetServerNode conn) return true; } + public void CloseAll() + { + try + { + _lock.WriteLock(); + + if (_disposed) return; + + for (int i = 0; i < numConnection; i++) + connections[i].Dispose(); + numConnection = 0; + Array.Clear(connections); + } + catch (Exception ex) + { + logger?.LogError(ex, "GarnetConnectionStore.CloseAll"); + } + finally + { + _lock.WriteUnlock(); + } + } + /// /// Remove GarnetServerNode connection object from store. 
/// diff --git a/libs/cluster/Server/Migration/MigrateSession.cs b/libs/cluster/Server/Migration/MigrateSession.cs index fa784cce7e..04612e8360 100644 --- a/libs/cluster/Server/Migration/MigrateSession.cs +++ b/libs/cluster/Server/Migration/MigrateSession.cs @@ -69,6 +69,16 @@ internal sealed unsafe partial class MigrateSession : IDisposable /// public HashSet GetSlots => _sslots; + /// + /// Get network buffer specs + /// + public NetworkBufferSettings GetNetworkBufferSettings => clusterProvider.migrationManager.GetNetworkBufferSettings; + + /// + /// Get network pool + /// + public LimitedFixedBufferPool GetNetworkPool => clusterProvider.migrationManager.GetNetworkPool; + readonly GarnetClientSession _gcs; /// @@ -79,9 +89,10 @@ internal sealed unsafe partial class MigrateSession : IDisposable public bool Overlap(MigrateSession session) => session._sslots.Overlaps(_sslots); - readonly int _clientBufferSize; - - TransferOption transferOption; + /// + /// Transfer option used for this migrateSession + /// + readonly TransferOption transferOption; /// /// MigrateSession Constructor @@ -138,14 +149,14 @@ internal MigrateSession( Status = MigrateState.PENDING; // Single key value size + few bytes for command header and arguments - _clientBufferSize = 256 + (1 << clusterProvider.serverOptions.PageSizeBits()); _gcs = new( _targetAddress, _targetPort, + networkBufferSettings: GetNetworkBufferSettings, + networkPool: GetNetworkPool, clusterProvider?.serverOptions.TlsOptions?.TlsClientOptions, authUsername: _username, authPassword: _passwd, - bufferSize: _clientBufferSize, logger: logger); } diff --git a/libs/cluster/Server/Migration/MigrateSessionSlots.cs b/libs/cluster/Server/Migration/MigrateSessionSlots.cs index 2efb389a25..ae817e7a43 100644 --- a/libs/cluster/Server/Migration/MigrateSessionSlots.cs +++ b/libs/cluster/Server/Migration/MigrateSessionSlots.cs @@ -14,12 +14,13 @@ internal sealed unsafe partial class MigrateSession : IDisposable /// public bool 
MigrateSlotsDriver() { - { - logger?.LogTrace("Initializing MainStore Iterator"); - var storeTailAddress = clusterProvider.storeWrapper.store.Log.TailAddress; - var bufferSize = 1 << clusterProvider.serverOptions.PageSizeBits(); - MigrationKeyIterationFunctions.MainStoreGetKeysInSlots mainStoreGetKeysInSlots = new(this, _sslots, bufferSize: bufferSize); + logger?.LogTrace("Initializing MainStore Iterator"); + var storeTailAddress = clusterProvider.storeWrapper.store.Log.TailAddress; + var bufferSize = 1 << clusterProvider.serverOptions.PageSizeBits(); + MigrationKeyIterationFunctions.MainStoreGetKeysInSlots mainStoreGetKeysInSlots = new(this, _sslots, bufferSize: bufferSize); + try + { logger?.LogTrace("Begin MainStore Iteration"); while (true) { @@ -46,6 +47,10 @@ public bool MigrateSlotsDriver() if (!HandleMigrateTaskResponse(_gcs.CompleteMigrate(_sourceNodeId, _replaceOption, isMainStore: true))) return false; } + finally + { + mainStoreGetKeysInSlots.Dispose(); + } if (!clusterProvider.serverOptions.DisableObjects) { @@ -54,31 +59,38 @@ public bool MigrateSlotsDriver() var objectBufferSize = 1 << clusterProvider.serverOptions.ObjectStorePageSizeBits(); MigrationKeyIterationFunctions.ObjectStoreGetKeysInSlots objectStoreGetKeysInSlots = new(this, _sslots, bufferSize: objectBufferSize); - logger?.LogTrace("Begin ObjectStore Iteration"); - while (true) + try { - // Iterate object store - _ = localServerSession.BasicGarnetApi.IterateObjectStore(ref objectStoreGetKeysInSlots, objectStoreTailAddress); + logger?.LogTrace("Begin ObjectStore Iteration"); + while (true) + { + // Iterate object store + _ = localServerSession.BasicGarnetApi.IterateObjectStore(ref objectStoreGetKeysInSlots, objectStoreTailAddress); - // If did not acquire any keys stop scanning - if (_keys.IsNullOrEmpty()) - break; + // If did not acquire any keys stop scanning + if (_keys.IsNullOrEmpty()) + break; - // Safely migrate keys to target node - if (!MigrateKeys()) - { - logger?.LogError("IOERR 
Migrate keys failed."); - Status = MigrateState.FAIL; - return false; + // Safely migrate keys to target node + if (!MigrateKeys()) + { + logger?.LogError("IOERR Migrate keys failed."); + Status = MigrateState.FAIL; + return false; + } + + objectStoreGetKeysInSlots.AdvanceIterator(); + ClearKeys(); } - objectStoreGetKeysInSlots.AdvanceIterator(); - ClearKeys(); + // Signal target transmission completed and log stats for object store after migration completes + if (!HandleMigrateTaskResponse(_gcs.CompleteMigrate(_sourceNodeId, _replaceOption, isMainStore: false))) + return false; + } + finally + { + objectStoreGetKeysInSlots.Dispose(); } - - // Signal target transmission completed and log stats for object store after migration completes - if (!HandleMigrateTaskResponse(_gcs.CompleteMigrate(_sourceNodeId, _replaceOption, isMainStore: false))) - return false; } return true; diff --git a/libs/cluster/Server/Migration/MigrationKeyIterationFunctions.cs b/libs/cluster/Server/Migration/MigrationKeyIterationFunctions.cs index b6edefbc68..7587afcec8 100644 --- a/libs/cluster/Server/Migration/MigrationKeyIterationFunctions.cs +++ b/libs/cluster/Server/Migration/MigrationKeyIterationFunctions.cs @@ -14,7 +14,7 @@ internal sealed unsafe partial class MigrateSession : IDisposable { internal sealed class MigrationKeyIterationFunctions { - internal unsafe struct MainStoreGetKeysInSlots : IScanIteratorFunctions + internal sealed unsafe class MainStoreGetKeysInSlots : IScanIteratorFunctions { MigrationScanIterator iterator; @@ -23,6 +23,11 @@ internal MainStoreGetKeysInSlots(MigrateSession session, HashSet slots, int iterator = new MigrationScanIterator(session, slots, bufferSize); } + internal void Dispose() + { + iterator.Dispose(); + } + public void AdvanceIterator() => iterator.AdvanceIterator(); public bool SingleReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) @@ -61,6 +66,11 @@ 
internal ObjectStoreGetKeysInSlots(MigrateSession session, HashSet slots, i iterator = new MigrationScanIterator(session, slots, bufferSize); } + internal void Dispose() + { + iterator.Dispose(); + } + public void AdvanceIterator() => iterator.AdvanceIterator(); public bool SingleReader(ref byte[] key, ref IGarnetObject value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) @@ -90,11 +100,13 @@ public void OnStop(bool completed, long numberOfRecords) { } public void OnException(Exception exception, long numberOfRecords) { } } - internal struct MigrationScanIterator + internal sealed class MigrationScanIterator { readonly MigrateSession session; readonly HashSet slots; + readonly PoolEntry poolEntry; + long offset; long currentOffset; byte[] keyBuffer; @@ -108,11 +120,17 @@ internal MigrationScanIterator(MigrateSession session, HashSet slots, int b offset = 0; currentOffset = 0; - keyBuffer = GC.AllocateArray(bufferSize, pinned: true); + poolEntry = session.GetNetworkPool.Get(size: bufferSize); + keyBuffer = poolEntry.entry; currPtr = (byte*)Unsafe.AsPointer(ref keyBuffer[0]); endPtr = (byte*)Unsafe.AsPointer(ref keyBuffer[^1]); } + internal void Dispose() + { + poolEntry.Dispose(); + } + /// /// Check if slot is scheduled for migration /// diff --git a/libs/cluster/Server/Migration/MigrationManager.cs b/libs/cluster/Server/Migration/MigrationManager.cs index e1eb393ac1..b57a8b88be 100644 --- a/libs/cluster/Server/Migration/MigrationManager.cs +++ b/libs/cluster/Server/Migration/MigrationManager.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; +using Garnet.common; using Garnet.server; using Microsoft.Extensions.Logging; @@ -10,20 +11,61 @@ namespace Garnet.cluster { internal sealed class MigrationManager { + const int initialReceiveBufferSize = 1 << 12; + readonly ILogger logger; readonly ClusterProvider clusterProvider; readonly MigrateSessionTaskStore migrationTaskStore; + 
/// + /// NetworkBufferSettings for MigrateSession instances + /// + readonly NetworkBufferSettings networkBufferSettings; + + /// + /// NetworkPool instance created according to spec + /// + readonly LimitedFixedBufferPool networkPool; + + /// + /// Get NetworkBuffers object + /// + public NetworkBufferSettings GetNetworkBufferSettings => networkBufferSettings; + + /// + /// Get NetworkPool instance + /// + public LimitedFixedBufferPool GetNetworkPool => networkPool; + public MigrationManager(ClusterProvider clusterProvider, ILogger logger = null) { - migrationTaskStore = new MigrateSessionTaskStore(logger); + this.logger = logger; + this.migrationTaskStore = new MigrateSessionTaskStore(logger); this.clusterProvider = clusterProvider; + var sendBufferSize = 1 << clusterProvider.serverOptions.PageSizeBits(); + this.networkBufferSettings = new NetworkBufferSettings(sendBufferSize, initialReceiveBufferSize); + this.networkPool = networkBufferSettings.CreateBufferPool(logger: logger); } + /// + /// Dispose + /// public void Dispose() { - migrationTaskStore.Dispose(); + migrationTaskStore?.Dispose(); + networkPool?.Dispose(); } + /// + /// Used to free up buffer pool + /// + public void Purge() => networkPool.Purge(); + + public string GetBufferPoolStats() => networkPool.GetStats(); + + /// + /// Get number of active migrate sessions + /// + /// public int GetMigrationTaskCount() => migrationTaskStore.GetNumSessions(); diff --git a/libs/cluster/Server/Replication/PrimaryOps/AofSyncTaskInfo.cs b/libs/cluster/Server/Replication/PrimaryOps/AofSyncTaskInfo.cs index fc4d35be1a..a9d9977910 100644 --- a/libs/cluster/Server/Replication/PrimaryOps/AofSyncTaskInfo.cs +++ b/libs/cluster/Server/Replication/PrimaryOps/AofSyncTaskInfo.cs @@ -19,7 +19,7 @@ internal sealed class AofSyncTaskInfo : IBulkLogEntryConsumer, IDisposable readonly ILogger logger; public readonly GarnetClientSession garnetClient; readonly CancellationTokenSource cts; - TsavoriteLogScanIterator iter; + 
TsavoriteLogScanSingleIterator iter; readonly long startAddress; public long previousAddress; @@ -29,7 +29,6 @@ public AofSyncTaskInfo( string localNodeId, string remoteNodeId, GarnetClientSession garnetClient, - CancellationTokenSource cts, long startAddress, ILogger logger) { @@ -39,14 +38,20 @@ public AofSyncTaskInfo( this.remoteNodeId = remoteNodeId; this.logger = logger; this.garnetClient = garnetClient; - this.cts = cts; this.startAddress = startAddress; previousAddress = startAddress; + this.cts = new CancellationTokenSource(); } public void Dispose() { + // First cancel the token + cts?.Cancel(); + + // Then, dispose the iterator. This will also signal the iterator so that it can observe the canceled token iter?.Dispose(); + + // Finally, dispose the cts cts?.Dispose(); } @@ -86,7 +91,7 @@ public async Task ReplicaSyncTask() { garnetClient.Connect(); - iter = clusterProvider.storeWrapper.appendOnlyFile.Scan(startAddress, long.MaxValue, name: remoteNodeId[..20], scanUncommitted: true, recover: false, logger: logger); + iter = clusterProvider.storeWrapper.appendOnlyFile.ScanSingle(startAddress, long.MaxValue, scanUncommitted: true, recover: false, logger: logger); while (true) { diff --git a/libs/cluster/Server/Replication/PrimaryOps/AofTaskStore.cs b/libs/cluster/Server/Replication/PrimaryOps/AofTaskStore.cs index 3967321bcb..f8bf5e4d34 100644 --- a/libs/cluster/Server/Replication/PrimaryOps/AofTaskStore.cs +++ b/libs/cluster/Server/Replication/PrimaryOps/AofTaskStore.cs @@ -4,7 +4,6 @@ using System; using System.Collections.Generic; using System.Diagnostics; -using System.Threading; using Garnet.client; using Garnet.common; using Microsoft.Extensions.Logging; @@ -41,7 +40,7 @@ public AofTaskStore(ClusterProvider clusterProvider, int initialSize = 1, ILogge logPageSizeMask = logPageSize - 1; if (clusterProvider.serverOptions.MainMemoryReplication) clusterProvider.storeWrapper.appendOnlyFile.SafeTailShiftCallback = SafeTailShiftCallback; - TruncateLagAddress 
= clusterProvider.storeWrapper.appendOnlyFile.UnsafeGetReadOnlyLagAddress() - 2 * logPageSize; + TruncateLagAddress = clusterProvider.storeWrapper.appendOnlyFile.UnsafeGetReadOnlyAddressLagOffset() - 2 * logPageSize; } TruncatedUntil = 0; } @@ -96,11 +95,11 @@ internal void SafeTailShiftCallback(long oldTailAddress, long newTailAddress) public void Dispose() { - _lock.WriteLock(); try { + _lock.WriteLock(); _disposed = true; - for (int i = 0; i < numTasks; i++) + for (var i = 0; i < numTasks; i++) { var task = tasks[i]; task.Dispose(); @@ -119,7 +118,7 @@ public bool TryAddReplicationTask(string remoteNodeId, long startAddress, out Ao aofSyncTaskInfo = null; if (startAddress == 0) startAddress = ReplicationManager.kFirstValidAofAddress; - bool success = false; + var success = false; var current = clusterProvider.clusterManager.CurrentConfig; var (address, port) = current.GetWorkerAddressFromNodeId(remoteNodeId); @@ -131,8 +130,15 @@ public bool TryAddReplicationTask(string remoteNodeId, long startAddress, out Ao this, current.LocalNodeId, remoteNodeId, - new GarnetClientSession(address, port, clusterProvider.serverOptions.TlsOptions?.TlsClientOptions, authUsername: clusterProvider.ClusterUsername, authPassword: clusterProvider.ClusterPassword, 1 << 22, logger: logger), - new CancellationTokenSource(), + new GarnetClientSession( + address, + port, + clusterProvider.replicationManager.GetAofSyncNetworkBufferSettings, + clusterProvider.replicationManager.GetNetworkPool, + tlsOptions: clusterProvider.serverOptions.TlsOptions?.TlsClientOptions, + authUsername: clusterProvider.ClusterUsername, + authPassword: clusterProvider.ClusterPassword, + logger: logger), startAddress, logger); } @@ -280,7 +286,7 @@ public long SafeTruncateAof(long CheckpointCoveredAofAddress = long.MaxValue) { if (clusterProvider.serverOptions.MainMemoryReplication) { - clusterProvider.storeWrapper.appendOnlyFile?.UnsafeShiftBeginAddress(TruncatedUntil, snapToPageStart: true, truncateLog: true, 
noFlush: true); + clusterProvider.storeWrapper.appendOnlyFile?.UnsafeShiftBeginAddress(TruncatedUntil, snapToPageStart: true, truncateLog: true); } else { diff --git a/libs/cluster/Server/Replication/PrimaryOps/PrimarySendCheckpoint.cs b/libs/cluster/Server/Replication/PrimaryOps/PrimarySendCheckpoint.cs index c191ea65e2..11ba423ea3 100644 --- a/libs/cluster/Server/Replication/PrimaryOps/PrimarySendCheckpoint.cs +++ b/libs/cluster/Server/Replication/PrimaryOps/PrimarySendCheckpoint.cs @@ -57,8 +57,5 @@ private bool ReplicaSyncSessionBackgroundTask(string replicaId, out ReadOnlySpan replicaSyncSessionTaskStore.TryRemove(replicaId); } } - - public void DisposeReplicaSyncSessionTasks() - => replicaSyncSessionTaskStore.Dispose(); } } \ No newline at end of file diff --git a/libs/cluster/Server/Replication/PrimaryOps/ReplicaSyncSession.cs b/libs/cluster/Server/Replication/PrimaryOps/ReplicaSyncSession.cs index fec830be7d..8b68e48bc1 100644 --- a/libs/cluster/Server/Replication/PrimaryOps/ReplicaSyncSession.cs +++ b/libs/cluster/Server/Replication/PrimaryOps/ReplicaSyncSession.cs @@ -60,7 +60,15 @@ public async Task SendCheckpoint() return false; } - GarnetClientSession gcs = new(address, port, clusterProvider.serverOptions.TlsOptions?.TlsClientOptions, authUsername: clusterProvider.ClusterUsername, authPassword: clusterProvider.ClusterPassword, bufferSize: 1 << 20, logger: logger); + GarnetClientSession gcs = new( + address, + port, + clusterProvider.replicationManager.GetRSSNetworkBufferSettings, + clusterProvider.replicationManager.GetNetworkPool, + tlsOptions: clusterProvider.serverOptions.TlsOptions?.TlsClientOptions, + authUsername: clusterProvider.ClusterUsername, + authPassword: clusterProvider.ClusterPassword, + logger: logger); CheckpointEntry localEntry = default; AofSyncTaskInfo aofSyncTaskInfo = null; diff --git a/libs/cluster/Server/Replication/ReplicaOps/ReceiveCheckpointHandler.cs b/libs/cluster/Server/Replication/ReplicaOps/ReceiveCheckpointHandler.cs 
index 08ae913e94..b91bbfe635 100644 --- a/libs/cluster/Server/Replication/ReplicaOps/ReceiveCheckpointHandler.cs +++ b/libs/cluster/Server/Replication/ReplicaOps/ReceiveCheckpointHandler.cs @@ -104,7 +104,7 @@ private unsafe void IOCallback(uint errorCode, uint numBytes, object context) { if (errorCode != 0) { - string errorMessage = new Win32Exception((int)errorCode).Message; + var errorMessage = new Win32Exception((int)errorCode).Message; logger?.LogError("[Replica] OverlappedStream GetQueuedCompletionStatus error: {errorCode} msg: {errorMessage}", errorCode, errorMessage); } writeCheckpointSemaphore.Release(); diff --git a/libs/cluster/Server/Replication/ReplicaOps/ReplicaReceiveCheckpoint.cs b/libs/cluster/Server/Replication/ReplicaOps/ReplicaReceiveCheckpoint.cs index 64b7af34c6..2044de0f0a 100644 --- a/libs/cluster/Server/Replication/ReplicaOps/ReplicaReceiveCheckpoint.cs +++ b/libs/cluster/Server/Replication/ReplicaOps/ReplicaReceiveCheckpoint.cs @@ -69,7 +69,6 @@ public bool TryBeginReplicate(ClusterSession session, string nodeid, bool backgr } } - /// /// Try to initiate the attach to primary sequence to recover checkpoint, replay AOF and start AOF stream. 
/// NOTE: @@ -147,7 +146,14 @@ private async Task InitiateReplicaSync() try { - gcs = new(address, port, clusterProvider.serverOptions.TlsOptions?.TlsClientOptions, authUsername: clusterProvider.ClusterUsername, authPassword: clusterProvider.ClusterPassword, bufferSize: 1 << 21); + gcs = new( + address, + port, + clusterProvider.replicationManager.GetIRSNetworkBufferSettings, + clusterProvider.replicationManager.GetNetworkPool, + tlsOptions: clusterProvider.serverOptions.TlsOptions?.TlsClientOptions, + authUsername: clusterProvider.ClusterUsername, + authPassword: clusterProvider.ClusterPassword); recvCheckpointHandler = new ReceiveCheckpointHandler(clusterProvider, logger); gcs.Connect(); diff --git a/libs/cluster/Server/Replication/ReplicaOps/ReplicationReplicaAofSync.cs b/libs/cluster/Server/Replication/ReplicaOps/ReplicationReplicaAofSync.cs index 4f799be4fe..446eb15491 100644 --- a/libs/cluster/Server/Replication/ReplicaOps/ReplicationReplicaAofSync.cs +++ b/libs/cluster/Server/Replication/ReplicaOps/ReplicationReplicaAofSync.cs @@ -27,24 +27,30 @@ public unsafe void ProcessPrimaryStream(byte* record, int recordLength, long pre if (clusterProvider.replicationManager.Recovering) { logger?.LogWarning("Replica is recovering cannot sync AOF"); - throw new GarnetException("Replica is recovering cannot sync AOF", LogLevel.Warning); + throw new GarnetException("Replica is recovering cannot sync AOF", LogLevel.Warning, clientResponse: false); } if (currentConfig.LocalNodeRole != NodeRole.REPLICA) { logger?.LogWarning("This node {nodeId} is not a replica", currentConfig.LocalNodeId); - throw new GarnetException($"This node {currentConfig.LocalNodeId} is not a replica", LogLevel.Warning); + throw new GarnetException($"This node {currentConfig.LocalNodeId} is not a replica", LogLevel.Warning, clientResponse: false); } if (clusterProvider.serverOptions.MainMemoryReplication) { - var firstRecordLength = GetFirstAofEntryLength(record); - if (previousAddress > 
ReplicationOffset || - currentAddress > previousAddress + firstRecordLength) + // If the incoming AOF chunk fits in the space between previousAddress and currentAddress (ReplicationOffset), + // an enqueue will result in an offset mismatch. So, we have to first reset the AOF to point to currentAddress. + if (currentAddress > previousAddress) { - logger?.LogWarning("MainMemoryReplication: Skipping from {ReplicaReplicationOffset} to {currentAddress}", ReplicationOffset, currentAddress); - storeWrapper.appendOnlyFile.Initialize(currentAddress, currentAddress); - ReplicationOffset = currentAddress; + if ( + (currentAddress % (1 << storeWrapper.appendOnlyFile.UnsafeGetLogPageSizeBits()) != 0) || // the skip was to a non-page-boundary + (currentAddress >= previousAddress + recordLength) // the skip will not be auto-handled by the AOF enqueue + ) + { + logger?.LogWarning("MainMemoryReplication: Skipping from {ReplicaReplicationOffset} to {currentAddress}", ReplicationOffset, currentAddress); + storeWrapper.appendOnlyFile.Initialize(currentAddress, currentAddress); + ReplicationOffset = currentAddress; + } } } @@ -53,7 +59,7 @@ public unsafe void ProcessPrimaryStream(byte* record, int recordLength, long pre { logger?.LogInformation("Processing {recordLength} bytes; previousAddress {previousAddress}, currentAddress {currentAddress}, nextAddress {nextAddress}, current AOF tail {tail}", recordLength, previousAddress, currentAddress, nextAddress, storeWrapper.appendOnlyFile.TailAddress); logger?.LogError("Before ProcessPrimaryStream: Replication offset mismatch: ReplicaReplicationOffset {ReplicaReplicationOffset}, aof.TailAddress {tailAddress}", ReplicationOffset, storeWrapper.appendOnlyFile.TailAddress); - throw new GarnetException($"Before ProcessPrimaryStream: Replication offset mismatch: ReplicaReplicationOffset {ReplicationOffset}, aof.TailAddress {storeWrapper.appendOnlyFile.TailAddress}", LogLevel.Warning); + throw new GarnetException($"Before ProcessPrimaryStream: 
Replication offset mismatch: ReplicaReplicationOffset {ReplicationOffset}, aof.TailAddress {storeWrapper.appendOnlyFile.TailAddress}", LogLevel.Warning, clientResponse: false); } // Enqueue to AOF @@ -76,7 +82,7 @@ public unsafe void ProcessPrimaryStream(byte* record, int recordLength, long pre { if (!clusterProvider.serverOptions.EnableFastCommit) { - throw new Exception("Received FastCommit request at replica AOF processor, but FastCommit is not enabled"); + throw new GarnetException("Received FastCommit request at replica AOF processor, but FastCommit is not enabled", clientResponse: false); } TsavoriteLogRecoveryInfo info = new(); info.Initialize(new ReadOnlySpan(ptr + entryLength, -payloadLength)); @@ -90,19 +96,19 @@ public unsafe void ProcessPrimaryStream(byte* record, int recordLength, long pre if (ReplicationOffset != nextAddress) { logger?.LogWarning("Replication offset mismatch: ReplicaReplicationOffset {ReplicaReplicationOffset}, nextAddress {nextAddress}", ReplicationOffset, nextAddress); - throw new GarnetException($"Replication offset mismatch: ReplicaReplicationOffset {ReplicationOffset}, nextAddress {nextAddress}", LogLevel.Warning); + throw new GarnetException($"Replication offset mismatch: ReplicaReplicationOffset {ReplicationOffset}, nextAddress {nextAddress}", LogLevel.Warning, clientResponse: false); } if (ReplicationOffset != storeWrapper.appendOnlyFile.TailAddress) { logger?.LogWarning("After ProcessPrimaryStream: Replication offset mismatch: ReplicaReplicationOffset {ReplicaReplicationOffset}, aof.TailAddress {tailAddress}", ReplicationOffset, storeWrapper.appendOnlyFile.TailAddress); - throw new GarnetException($"After ProcessPrimaryStream: Replication offset mismatch: ReplicaReplicationOffset {ReplicationOffset}, aof.TailAddress {storeWrapper.appendOnlyFile.TailAddress}", LogLevel.Warning); + throw new GarnetException($"After ProcessPrimaryStream: Replication offset mismatch: ReplicaReplicationOffset {ReplicationOffset}, aof.TailAddress 
{storeWrapper.appendOnlyFile.TailAddress}", LogLevel.Warning, clientResponse: false); } } catch (Exception ex) { logger?.LogWarning(ex, "An exception occurred at ReplicationManager.ProcessPrimaryStream"); - throw new GarnetException(ex.Message, ex, LogLevel.Warning); + throw new GarnetException(ex.Message, ex, LogLevel.Warning, clientResponse: false); } } diff --git a/libs/cluster/Server/Replication/ReplicationManager.cs b/libs/cluster/Server/Replication/ReplicationManager.cs index 56c191b58a..78e6844163 100644 --- a/libs/cluster/Server/Replication/ReplicationManager.cs +++ b/libs/cluster/Server/Replication/ReplicationManager.cs @@ -89,8 +89,13 @@ public long GetCurrentSafeAofAddress() public ReplicationManager(ClusterProvider clusterProvider, ILogger logger = null) { var opts = clusterProvider.serverOptions; + this.logger = logger; this.clusterProvider = clusterProvider; this.storeWrapper = clusterProvider.storeWrapper; + + this.networkPool = networkBufferSettings.CreateBufferPool(logger: logger); + ValidateNetworkBufferSettings(); + aofProcessor = new AofProcessor(storeWrapper, recordToAof: false, logger: logger); replicaSyncSessionTaskStore = new ReplicaSyncSessionTaskStore(storeWrapper, clusterProvider, logger); @@ -105,8 +110,6 @@ public ReplicationManager(ClusterProvider clusterProvider, ILogger logger = null if (clusterProvider.clusterManager.CurrentConfig.LocalNodeRole == NodeRole.REPLICA && !StartRecovery()) throw new Exception(Encoding.ASCII.GetString(CmdStrings.RESP_ERR_GENERIC_CANNOT_ACQUIRE_RECOVERY_LOCK)); - this.logger = logger; - checkpointStore = new CheckpointStore(storeWrapper, clusterProvider, true, logger); aofTaskStore = new(clusterProvider, 1, logger); @@ -130,6 +133,13 @@ public ReplicationManager(ClusterProvider clusterProvider, ILogger logger = null SetPrimaryReplicationId(); } + /// + /// Used to free up buffer pool + /// + public void Purge() => networkPool.Purge(); + + public string GetBufferPoolStats() => networkPool.GetStats(); + void 
CheckpointVersionShift(bool isMainStore, long oldVersion, long newVersion) { if (clusterProvider.clusterManager.CurrentConfig.LocalNodeRole == NodeRole.REPLICA) @@ -172,17 +182,12 @@ public void Dispose() pool.Free(); checkpointStore.WaitForReplicas(); - DisposeReplicaSyncSessionTasks(); - DisposeConnections(); replicaSyncSessionTaskStore.Dispose(); - ctsRepManager.Dispose(); - aofProcessor?.Dispose(); - } - - public void DisposeConnections() - { ctsRepManager.Cancel(); + ctsRepManager.Dispose(); aofTaskStore.Dispose(); + aofProcessor?.Dispose(); + networkPool?.Dispose(); } /// diff --git a/libs/cluster/Server/Replication/ReplicationNetworkBufferSettings.cs b/libs/cluster/Server/Replication/ReplicationNetworkBufferSettings.cs new file mode 100644 index 0000000000..a94dea9c71 --- /dev/null +++ b/libs/cluster/Server/Replication/ReplicationNetworkBufferSettings.cs @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using System; +using Garnet.common; +using Microsoft.Extensions.Logging; + +namespace Garnet.cluster +{ + internal sealed partial class ReplicationManager : IDisposable + { + /// + /// NetworkBufferSettings for the buffer pool maintained by the ReplicationManager + /// + const int defaultSendBufferSize = 1 << 22; + const int defaultInitialReceiveBufferSize = 1 << 12; + readonly NetworkBufferSettings networkBufferSettings = new(defaultSendBufferSize, defaultInitialReceiveBufferSize); + + /// + /// Network pool maintained by the ReplicationManager + /// + readonly LimitedFixedBufferPool networkPool; + public LimitedFixedBufferPool GetNetworkPool => networkPool; + + /// + /// NetworkBufferSettings for the replica sync session clients + /// + const int rssSendBufferSize = 1 << 20; + const int rssInitialReceiveBufferSize = 1 << 12; + public NetworkBufferSettings GetRSSNetworkBufferSettings { get; } = new(rssSendBufferSize, rssInitialReceiveBufferSize); + + /// + /// NetworkBufferSettings for the client used for 
InitiateReplicaSync (ReplicateReceiveCheckpoint.cs) + /// + const int irsSendBufferSize = 1 << 17; + const int irsInitialReceiveBufferSize = 1 << 17; + public NetworkBufferSettings GetIRSNetworkBufferSettings { get; } = new(irsSendBufferSize, irsInitialReceiveBufferSize); + + /// + /// NetworkBufferSettings for the AOF sync task clients + /// + const int aofSyncSendBufferSize = 1 << 22; + const int aofSyncInitialReceiveBufferSize = 1 << 17; + public NetworkBufferSettings GetAofSyncNetworkBufferSettings { get; } = new(aofSyncSendBufferSize, aofSyncInitialReceiveBufferSize); + + void ValidateNetworkBufferSettings() + { + if (!networkPool.Validate(GetRSSNetworkBufferSettings)) + logger?.LogWarning("NetworkBufferSettings for ReplicaSyncSession do not allow for buffer re-use with configured NetworkPool"); + + if (!networkPool.Validate(GetIRSNetworkBufferSettings)) + logger?.LogWarning("NetworkBufferSettings for InitiateReplicaSync do not allow for buffer re-use with configured NetworkPool"); + + if (!networkPool.Validate(GetAofSyncNetworkBufferSettings)) + logger?.LogWarning("NetworkBufferSettings for AofSyncTask do not allow for buffer re-use with configured NetworkPool"); + } + } +} \ No newline at end of file diff --git a/libs/cluster/Session/ClusterCommands.cs b/libs/cluster/Session/ClusterCommands.cs index 0096b85946..33469ce790 100644 --- a/libs/cluster/Session/ClusterCommands.cs +++ b/libs/cluster/Session/ClusterCommands.cs @@ -20,7 +20,7 @@ private int CountKeysInSessionStore(int slot) { ClusterKeyIterationFunctions.MainStoreCountKeys iterFuncs = new(slot); _ = basicGarnetApi.IterateMainStore(ref iterFuncs); - return iterFuncs.keyCount; + return iterFuncs.KeyCount; } private int CountKeysInObjectStore(int slot) @@ -29,7 +29,7 @@ private int CountKeysInObjectStore(int slot) { ClusterKeyIterationFunctions.ObjectStoreCountKeys iterFuncs = new(slot); _ = basicGarnetApi.IterateObjectStore(ref iterFuncs); - return iterFuncs.keyCount; + return iterFuncs.KeyCount; } 
return 0; } @@ -178,6 +178,7 @@ private void ProcessClusterCommands(RespCommand command, out bool invalidParamet RespCommand.CLUSTER_SLOTSTATE => NetworkClusterSlotState(out invalidParameters), _ => throw new Exception($"Unexpected cluster subcommand: {command}") }; + this.sessionMetrics?.incr_total_cluster_commands_processed(); } } } \ No newline at end of file diff --git a/libs/cluster/Session/ClusterKeyIterationFunctions.cs b/libs/cluster/Session/ClusterKeyIterationFunctions.cs index b8dbdaf2e9..54d91d6cd3 100644 --- a/libs/cluster/Session/ClusterKeyIterationFunctions.cs +++ b/libs/cluster/Session/ClusterKeyIterationFunctions.cs @@ -13,18 +13,30 @@ internal sealed unsafe partial class ClusterSession : IClusterSession { internal static class ClusterKeyIterationFunctions { - internal struct MainStoreCountKeys : IScanIteratorFunctions + internal class KeyIterationInfo { + // This must be a class as it is passed through pending IO operations, so it is wrapped by higher structures for inlining as a generic type arg. 
internal int keyCount; - readonly int slot; + internal readonly int slot; + + internal KeyIterationInfo(int slot) => this.slot = slot; + } + + internal sealed class MainStoreCountKeys : IScanIteratorFunctions + { + private readonly KeyIterationInfo info; + // This must be a class as it is passed through pending IO operations + + internal int KeyCount { get => info.keyCount; set => info.keyCount = value; } + internal int Slot => info.slot; - internal MainStoreCountKeys(int slot) => this.slot = slot; + internal MainStoreCountKeys(int slot) => info = new(slot); public bool SingleReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) { cursorRecordResult = CursorRecordResult.Accept; // default; not used here - if (HashSlotUtils.HashSlot(ref key) == slot && !Expired(ref value)) - keyCount++; + if (HashSlotUtils.HashSlot(ref key) == Slot && !Expired(ref value)) + KeyCount++; return true; } public bool ConcurrentReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) @@ -34,20 +46,23 @@ public void OnStop(bool completed, long numberOfRecords) { } public void OnException(Exception exception, long numberOfRecords) { } } - internal struct ObjectStoreCountKeys : IScanIteratorFunctions + internal sealed class ObjectStoreCountKeys : IScanIteratorFunctions { - internal int keyCount; - readonly int slot; + private readonly KeyIterationInfo info; + // This must be a class as it is passed through pending IO operations - internal ObjectStoreCountKeys(int slot) => this.slot = slot; + internal int KeyCount { get => info.keyCount; set => info.keyCount = value; } + internal int Slot => info.slot; + + internal ObjectStoreCountKeys(int slot) => info = new(slot); public bool SingleReader(ref byte[] key, ref IGarnetObject value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) 
{ cursorRecordResult = CursorRecordResult.Accept; // default; not used here , out CursorRecordResult cursorRecordResult fixed (byte* keyPtr = key) { - if (HashSlotUtils.HashSlot(keyPtr, key.Length) == slot && !Expired(ref value)) - keyCount++; + if (HashSlotUtils.HashSlot(keyPtr, key.Length) == Slot && !Expired(ref value)) + KeyCount++; } return true; } diff --git a/libs/cluster/Session/RespClusterBasicCommands.cs b/libs/cluster/Session/RespClusterBasicCommands.cs index f40c1350fc..295c5f16c9 100644 --- a/libs/cluster/Session/RespClusterBasicCommands.cs +++ b/libs/cluster/Session/RespClusterBasicCommands.cs @@ -11,6 +11,8 @@ namespace Garnet.cluster { internal sealed unsafe partial class ClusterSession : IClusterSession { + public string RemoteNodeId { get; private set; } + /// /// Implements CLUSTER BUMPEPOCH command /// @@ -378,6 +380,10 @@ private bool NetworkClusterGossip(out bool invalidParameters) if (gossipWithMeet || current.IsKnown(other.LocalNodeId)) { _ = clusterProvider.clusterManager.TryMerge(other); + + // Remember that this connection is being used for another cluster node to talk to us + Debug.Assert(RemoteNodeId is null || RemoteNodeId == other.LocalNodeId, "Node Id shouldn't change once set for a connection"); + RemoteNodeId = other.LocalNodeId; } else logger?.LogWarning("Received gossip from unknown node: {node-id}", other.LocalNodeId); diff --git a/libs/cluster/Session/RespClusterReplicationCommands.cs b/libs/cluster/Session/RespClusterReplicationCommands.cs index 96c99b7c24..7fd9d416a2 100644 --- a/libs/cluster/Session/RespClusterReplicationCommands.cs +++ b/libs/cluster/Session/RespClusterReplicationCommands.cs @@ -5,6 +5,7 @@ using System.Text; using Garnet.common; using Garnet.server; +using Microsoft.Extensions.Logging; namespace Garnet.cluster { @@ -169,8 +170,7 @@ private bool NetworkClusterAppendLog(out bool invalidParameters) !parseState.TryGetLong(2, out var currentAddress) || !parseState.TryGetLong(3, out var nextAddress)) { - while 
(!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER, ref dcurr, dend)) - SendAndReset(); + logger?.LogError("{str}", Encoding.ASCII.GetString(CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER)); return true; } @@ -181,22 +181,16 @@ private bool NetworkClusterAppendLog(out bool invalidParameters) var primaryId = currentConfig.LocalNodePrimaryId; if (localRole != NodeRole.REPLICA) { - // TODO: handle this - //while (!RespWriteUtils.WriteError("ERR aofsync node not a replica"u8, ref dcurr, dend)) - // SendAndReset(); + throw new GarnetException("aofsync node not a replica", LogLevel.Error, clientResponse: false); } else if (!primaryId.Equals(nodeId)) { - // TODO: handle this - //while (!RespWriteUtils.WriteError($"ERR aofsync node replicating {primaryId}", ref dcurr, dend)) - // SendAndReset(); + throw new GarnetException($"aofsync node replicating {primaryId}", LogLevel.Error, clientResponse: false); } else { clusterProvider.replicationManager.ProcessPrimaryStream(sbRecord.ToPointer(), sbRecord.Length, previousAddress, currentAddress, nextAddress); - //while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_OK, ref dcurr, dend)) - // SendAndReset(); } return true; diff --git a/libs/common/ConvertUtils.cs b/libs/common/ConvertUtils.cs index 3228d49e55..140d97ed36 100644 --- a/libs/common/ConvertUtils.cs +++ b/libs/common/ConvertUtils.cs @@ -2,7 +2,7 @@ // Licensed under the MIT license. using System; -using System.Diagnostics; +using System.Runtime.CompilerServices; namespace Garnet.common { @@ -11,6 +11,11 @@ namespace Garnet.common /// public static class ConvertUtils { + /// + /// Contains the number of ticks representing 1970/1/1. Value is equal to new DateTime(1970, 1, 1).Ticks + /// + private static readonly long _unixEpochTicks = DateTimeOffset.UnixEpoch.Ticks; + /// /// Convert diff ticks - utcNow.ticks to seconds. 
/// @@ -43,5 +48,47 @@ public static long MillisecondsFromDiffUtcNowTicks(long ticks) } return milliseconds; } + + /// + /// Converts a Unix timestamp in seconds to ticks. + /// + /// The Unix timestamp in seconds. + /// The equivalent number of ticks. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static long UnixTimestampInSecondsToTicks(long unixTimestamp) + { + return unixTimestamp * TimeSpan.TicksPerSecond + _unixEpochTicks; + } + + /// + /// Converts a Unix timestamp in milliseconds to ticks. + /// + /// The Unix timestamp in milliseconds. + /// The equivalent number of ticks. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static long UnixTimestampInMillisecondsToTicks(long unixTimestamp) + { + return unixTimestamp * TimeSpan.TicksPerMillisecond + _unixEpochTicks; + } + + /// + /// Convert ticks to Unix time in seconds. + /// + /// The ticks to convert. + /// The Unix time in seconds. + public static long UnixTimeInSecondsFromTicks(long ticks) + { + return ticks > 0 ? (ticks - _unixEpochTicks) / TimeSpan.TicksPerSecond : -1; + } + + /// + /// Convert ticks to Unix time in milliseconds. + /// + /// The ticks to convert. + /// The Unix time in milliseconds. + public static long UnixTimeInMillisecondsFromTicks(long ticks) + { + return ticks > 0 ? 
(ticks - _unixEpochTicks) / TimeSpan.TicksPerMillisecond : -1; + } } } \ No newline at end of file diff --git a/libs/common/EnumUtils.cs b/libs/common/EnumUtils.cs index 07adc9f380..7ac88a6557 100644 --- a/libs/common/EnumUtils.cs +++ b/libs/common/EnumUtils.cs @@ -91,8 +91,8 @@ public static bool TryParseEnumFromDescription(string strVal, out T val) wher private static void AddTypeToCache() { - var valToDesc = new Dictionary(); - var descToVals = new Dictionary>(); + var valToDesc = new Dictionary(StringComparer.OrdinalIgnoreCase); + var descToVals = new Dictionary>(StringComparer.OrdinalIgnoreCase); foreach (var flagFieldInfo in typeof(T).GetFields()) { diff --git a/libs/common/Format.cs b/libs/common/Format.cs index 92b28d480d..c02a758cc9 100644 --- a/libs/common/Format.cs +++ b/libs/common/Format.cs @@ -168,6 +168,21 @@ public static string GetHostName(ILogger? logger = null) return ""; } + + public static string MemoryBytes(long size) + { + if( size < (1 << 20)) + return KiloBytes(size); + else if (size < (1 << 30)) + return MegaBytes(size); + else return GigaBytes(size); + } + + public static string GigaBytes(long size) => (((size - 1) >> 30) + 1).ToString("n0") + "GB"; + + public static string MegaBytes(long size) => (((size - 1) >> 20) + 1).ToString("n0") + "MB"; + + public static string KiloBytes(long size) =>(((size - 1) >> 10) + 1).ToString("n0") + "KB"; } #pragma warning restore format } \ No newline at end of file diff --git a/libs/common/GarnetException.cs b/libs/common/GarnetException.cs index bc359cbefd..daaf99f2bd 100644 --- a/libs/common/GarnetException.cs +++ b/libs/common/GarnetException.cs @@ -16,22 +16,25 @@ public class GarnetException : Exception /// LogLevel for this exception /// public LogLevel LogLevel { get; } = LogLevel.Trace; + public bool ClientResponse { get; } = true; /// /// Throw Garnet exception /// - public GarnetException(LogLevel logLevel = LogLevel.Trace) + public GarnetException(LogLevel logLevel = LogLevel.Trace, bool 
clientResponse = true) { LogLevel = logLevel; + ClientResponse = clientResponse; } /// /// Throw Garnet exception with message /// /// - public GarnetException(string message, LogLevel logLevel = LogLevel.Trace) : base(message) + public GarnetException(string message, LogLevel logLevel = LogLevel.Trace, bool clientResponse = true) : base(message) { LogLevel = logLevel; + ClientResponse = clientResponse; } /// @@ -39,9 +42,10 @@ public GarnetException(string message, LogLevel logLevel = LogLevel.Trace) : bas /// /// /// - public GarnetException(string message, Exception innerException, LogLevel logLevel = LogLevel.Trace) : base(message, innerException) + public GarnetException(string message, Exception innerException, LogLevel logLevel = LogLevel.Trace, bool clientResponse = true) : base(message, innerException) { LogLevel = logLevel; + ClientResponse = clientResponse; } /// diff --git a/libs/common/LightClient.cs b/libs/common/LightClient.cs index da976993d2..e4add4cdf4 100644 --- a/libs/common/LightClient.cs +++ b/libs/common/LightClient.cs @@ -37,7 +37,7 @@ public unsafe class LightClient : ClientBase, IServerHook, IMessageConsumer public int opType; LightClientTcpNetworkHandler networkHandler; - + readonly NetworkBufferSettings networkBufferSettings; readonly LimitedFixedBufferPool networkPool; /// @@ -58,7 +58,8 @@ public LightClient( SslClientAuthenticationOptions sslOptions = null) : base(address, port, BufferSize) { - this.networkPool = new LimitedFixedBufferPool(BufferSize); + this.networkBufferSettings = new NetworkBufferSettings(BufferSize, BufferSize); + this.networkPool = networkBufferSettings.CreateBufferPool(); this.onResponseDelegateUnsafe = onResponseDelegateUnsafe ?? 
new OnResponseDelegateUnsafe(DefaultLightReceiveUnsafe); this.opType = opType; this.BufferSize = BufferSize; @@ -109,7 +110,7 @@ public void ReadOnly() public override void Connect() { socket = GetSendSocket(address, port); - networkHandler = new LightClientTcpNetworkHandler(this, socket, networkPool, sslOptions != null, this); + networkHandler = new LightClientTcpNetworkHandler(this, socket, networkBufferSettings, networkPool, sslOptions != null, this); networkHandler.StartAsync(sslOptions, $"{address}:{port}").ConfigureAwait(false).GetAwaiter().GetResult(); networkSender = networkHandler.GetNetworkSender(); networkSender.GetResponseObject(); @@ -212,7 +213,7 @@ public override void Dispose() { networkSender.ReturnResponseObject(); networkHandler?.Dispose(); - networkPool.Dispose(); + networkPool?.Dispose(); } /// diff --git a/libs/common/LightClientTcpNetworkHandler.cs b/libs/common/LightClientTcpNetworkHandler.cs index 12816e5f8f..dce0a76e9a 100644 --- a/libs/common/LightClientTcpNetworkHandler.cs +++ b/libs/common/LightClientTcpNetworkHandler.cs @@ -10,8 +10,8 @@ namespace Garnet.server { internal sealed class LightClientTcpNetworkHandler : TcpNetworkHandler { - public LightClientTcpNetworkHandler(LightClient serverHook, Socket socket, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer, int networkSendThrottleMax = 8, ILogger logger = null) - : base(serverHook, socket, networkPool, useTLS, messageConsumer, networkSendThrottleMax, logger) + public LightClientTcpNetworkHandler(LightClient serverHook, Socket socket, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer, int networkSendThrottleMax = 8, ILogger logger = null) + : base(serverHook, socket, networkBufferSettings, networkPool, useTLS, messageConsumer, networkSendThrottleMax: networkSendThrottleMax, logger: logger) { } diff --git a/libs/common/Memory/LimitedFixedBufferPool.cs 
b/libs/common/Memory/LimitedFixedBufferPool.cs index bdac9cab95..6c62c9f4d1 100644 --- a/libs/common/Memory/LimitedFixedBufferPool.cs +++ b/libs/common/Memory/LimitedFixedBufferPool.cs @@ -20,6 +20,10 @@ public sealed class LimitedFixedBufferPool : IDisposable { readonly PoolLevel[] pool; readonly int numLevels, minAllocationSize, maxEntriesPerLevel; + /// + /// This is the maximum allocated buffer size that the instance can support based on the number of pool levels. + /// + readonly int maxAllocationSize; readonly ILogger logger; /// @@ -27,7 +31,15 @@ public sealed class LimitedFixedBufferPool : IDisposable /// public int MinAllocationSize => minAllocationSize; - int totalAllocations; + /// + /// Total outstanding allocation references + /// + int totalReferences; + + /// + /// Total out of bound allocation requests + /// + int totalOutOfBoundAllocations; /// /// Constructor @@ -35,19 +47,45 @@ public sealed class LimitedFixedBufferPool : IDisposable public LimitedFixedBufferPool(int minAllocationSize, int maxEntriesPerLevel = 16, int numLevels = 4, ILogger logger = null) { this.minAllocationSize = minAllocationSize; + this.maxAllocationSize = minAllocationSize << (numLevels - 1); this.maxEntriesPerLevel = maxEntriesPerLevel; this.numLevels = numLevels; this.logger = logger; pool = new PoolLevel[numLevels]; } + /// + /// Validate if provided settings against the provided pool instance + /// + /// + /// + public bool Validate(NetworkBufferSettings settings) + { + var sendBufferSize = settings.sendBufferSize; + // Send buffer size should be inclusive of the max and min allocation sizes of this instance + if (sendBufferSize > maxAllocationSize || sendBufferSize < minAllocationSize) + return false; + + var initialReceiveSize = settings.initialReceiveBufferSize; + // Initial received buffer size should be inclusive of the max and min allocation sizes of this instance + if (initialReceiveSize > maxAllocationSize || initialReceiveSize < minAllocationSize) + return 
false; + + var maxReceiveBufferSize = settings.maxReceiveBufferSize; + // Maximum receive size should be inclusive of the max and min allocation sizes of this instance + if (maxReceiveBufferSize > maxAllocationSize || maxReceiveBufferSize < minAllocationSize) + return false; + + return true; + } + /// /// Return /// [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Return(PoolEntry buffer) { - int level = Position(buffer.entry.Length); + var level = Position(buffer.entry.Length); if (level >= 0) { if (pool[level] != null) @@ -61,8 +99,8 @@ public void Return(PoolEntry buffer) Interlocked.Decrement(ref pool[level].size); } } - Debug.Assert(totalAllocations > 0, $"Return with {totalAllocations}"); - Interlocked.Decrement(ref totalAllocations); + Debug.Assert(totalReferences > 0, $"Return with {totalReferences}"); + Interlocked.Decrement(ref totalReferences); } /// @@ -73,14 +111,16 @@ public void Return(PoolEntry buffer) [MethodImpl(MethodImplOptions.AggressiveInlining)] public unsafe PoolEntry Get(int size) { - if (Interlocked.Increment(ref totalAllocations) < 0) + if (Interlocked.Increment(ref totalReferences) < 0) { - Interlocked.Decrement(ref totalAllocations); + Interlocked.Decrement(ref totalReferences); logger?.LogError("Invalid Get on disposed pool"); return null; } - int level = Position(size); + var level = Position(size); + if (level == -1) Interlocked.Increment(ref totalOutOfBoundAllocations); + if (level >= 0) { if (pool[level] == null) @@ -99,7 +139,28 @@ public unsafe PoolEntry Get(int size) } /// - /// Free buffer + /// Purge pool entries from all levels + /// NOTE: + /// This is used to reclaim any unused buffer pool entries that were previously allocated. + /// It does not wait for all referenced buffers to be returned. + /// Use Dispose of you want to destroy this instance. 
+ /// + public void Purge() + { + for (var i = 0; i < numLevels; i++) + { + if (pool[i] == null) continue; + // Keep trying Dequeuing until no items left to free + while (pool[i].items.TryDequeue(out var _)) + Interlocked.Decrement(ref pool[i].size); + } + } + + /// + /// Dipose pool entries from all levels + /// NOTE: + /// This is used to destroy the instance and reclaim all allocated buffer pool entries. + /// As a consequence it spin waits until totalReferences goes back down to 0 and blocks any future allocations. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() @@ -107,8 +168,8 @@ public void Dispose() #if HANGDETECT int count = 0; #endif - while (totalAllocations > int.MinValue && - Interlocked.CompareExchange(ref totalAllocations, int.MinValue, 0) != 0) + while (totalReferences > int.MinValue && + Interlocked.CompareExchange(ref totalReferences, int.MinValue, 0) != 0) { #if HANGDETECT if (++count % 10000 == 0) @@ -116,15 +177,14 @@ public void Dispose() #endif Thread.Yield(); } - for (int i = 0; i < numLevels; i++) + + for (var i = 0; i < numLevels; i++) { if (pool[i] == null) continue; while (pool[i].size > 0) { while (pool[i].items.TryDequeue(out var result)) - { Interlocked.Decrement(ref pool[i].size); - } Thread.Yield(); } pool[i] = null; @@ -132,18 +192,31 @@ public void Dispose() } /// - /// Print pool contents + /// Get statistics for this buffer pool /// - public void Print() + /// + public string GetStats() { - for (int i = 0; i < numLevels; i++) + var stats = $"totalReferences={totalReferences}," + + $"numLevels={numLevels}," + + $"maxEntriesPerLevel={maxEntriesPerLevel}," + + $"minAllocationSize={Format.MemoryBytes(minAllocationSize)}," + + $"maxAllocationSize={Format.MemoryBytes(maxAllocationSize)}," + + $"totalOutOfBoundAllocations={totalOutOfBoundAllocations}"; + + var bufferStats = ""; + var totalBufferCount = 0; + for (var i = 0; i < numLevels; i++) { - if (pool[i] == null) continue; - foreach (var item in 
pool[i].items) - { - Console.WriteLine(" " + item.entry.Length.ToString()); - } + if (pool[i] == null || pool[i].items.Count == 0) continue; + totalBufferCount += pool[i].items.Count; + bufferStats += $"<{pool[i].items.Count}:{Format.MemoryBytes(minAllocationSize << i)}>"; } + + if (totalBufferCount > 0) + stats += $",totalBufferCount={totalBufferCount},[" + bufferStats + "]"; + + return stats; } [MethodImpl(MethodImplOptions.AggressiveInlining)] @@ -151,15 +224,23 @@ int Position(int v) { if (v < minAllocationSize || !BitOperations.IsPow2(v)) return -1; + var level = GetLevel(minAllocationSize, v); + return level >= numLevels ? -1 : level; + } - v /= minAllocationSize; - - if (v == 1) return 0; + /// + /// Calculate level from minAllocationSize and requestedSize + /// + /// + /// + /// + public static int GetLevel(int minAllocationSize, int requestedSize) + { + Debug.Assert(BitOperations.IsPow2(minAllocationSize)); + Debug.Assert(BitOperations.IsPow2(requestedSize)); + var level = requestedSize / minAllocationSize; - int level = BitOperations.Log2((uint)v - 1) + 1; - if (level >= numLevels) - return -1; - return level; + return level == 1 ? 0 : BitOperations.Log2((uint)level - 1) + 1; } } } \ No newline at end of file diff --git a/libs/common/Metrics/InfoMetricsType.cs b/libs/common/Metrics/InfoMetricsType.cs index 302bf30205..70b7f2424f 100644 --- a/libs/common/Metrics/InfoMetricsType.cs +++ b/libs/common/Metrics/InfoMetricsType.cs @@ -72,6 +72,10 @@ public enum InfoMetricsType : byte /// Modules info /// MODULES, + /// + /// Shared buffer pool stats + /// + BPSTATS, } /// diff --git a/libs/common/NetworkBufferSettings.cs b/libs/common/NetworkBufferSettings.cs new file mode 100644 index 0000000000..f30bca0543 --- /dev/null +++ b/libs/common/NetworkBufferSettings.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +using System; +using System.Diagnostics; +using Microsoft.Extensions.Logging; + +namespace Garnet.common +{ + /// + /// Definition of NetworkBufferSettings object + /// + public class NetworkBufferSettings + { + /// + /// Send buffer size. + /// (NOTE: Send buffers are fixed and cannot grow automatically. Caller responsible for allocating correct amount and handling larger payloads.) + /// + public readonly int sendBufferSize; + + /// + /// Initial allocation size for receive network buffer. + /// (NOTE: Receive buffers can automatically grow to accommodate larger payloads.) + /// + public readonly int initialReceiveBufferSize; + + /// + /// Max allocation size for receive network buffer + /// + public readonly int maxReceiveBufferSize; + + /// + /// Default constructor + /// + public NetworkBufferSettings() : this(1 << 17, 1 << 17, 1 << 20) { } + + /// + /// Set network buffer sizes without allocating them + /// + /// + /// + /// + public NetworkBufferSettings(int sendBufferSize = 1 << 17, int initialReceiveBufferSize = 1 << 17, int maxReceiveBufferSize = 1 << 20) + { + this.sendBufferSize = sendBufferSize; + this.initialReceiveBufferSize = initialReceiveBufferSize; + this.maxReceiveBufferSize = maxReceiveBufferSize; + } + + /// + /// Allocate network buffer pool + /// + /// + /// + /// + public LimitedFixedBufferPool CreateBufferPool(int maxEntriesPerLevel = 16, ILogger logger = null) + { + var minSize = Math.Min(Math.Min(sendBufferSize, initialReceiveBufferSize), maxReceiveBufferSize); + var maxSize = Math.Max(Math.Max(sendBufferSize, initialReceiveBufferSize), maxReceiveBufferSize); + + var levels = LimitedFixedBufferPool.GetLevel(minSize, maxSize) + 1; + Debug.Assert(levels >= 0); + levels = Math.Max(4, levels); + return new LimitedFixedBufferPool(minSize, maxEntriesPerLevel: maxEntriesPerLevel, numLevels: levels, logger: logger); + } + } +} \ No newline at end of file diff --git a/libs/common/Networking/GarnetSaeaBuffer.cs 
b/libs/common/Networking/GarnetSaeaBuffer.cs index 6f6f575427..bf2b362aaf 100644 --- a/libs/common/Networking/GarnetSaeaBuffer.cs +++ b/libs/common/Networking/GarnetSaeaBuffer.cs @@ -25,12 +25,12 @@ public unsafe class GarnetSaeaBuffer : IDisposable /// Construct new instance /// /// Event handler - /// - public GarnetSaeaBuffer(EventHandler eventHandler, LimitedFixedBufferPool networkPool) + /// + public GarnetSaeaBuffer(EventHandler eventHandler, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool) { socketEventAsyncArgs = new SocketAsyncEventArgs(); - buffer = networkPool.Get(networkPool.MinAllocationSize); + buffer = networkPool.Get(networkBufferSettings.sendBufferSize); socketEventAsyncArgs.SetBuffer(buffer.entry, 0, buffer.entry.Length); socketEventAsyncArgs.Completed += eventHandler; } diff --git a/libs/common/Networking/GarnetTcpNetworkSender.cs b/libs/common/Networking/GarnetTcpNetworkSender.cs index d45546cf9d..6a1a77b515 100644 --- a/libs/common/Networking/GarnetTcpNetworkSender.cs +++ b/libs/common/Networking/GarnetTcpNetworkSender.cs @@ -47,7 +47,9 @@ public class GarnetTcpNetworkSender : NetworkSenderBase protected readonly int ThrottleMax = 8; readonly string remoteEndpoint; + readonly string localEndpoint; + readonly NetworkBufferSettings networkBufferSettings; readonly LimitedFixedBufferPool networkPool; /// @@ -55,36 +57,41 @@ public class GarnetTcpNetworkSender : NetworkSenderBase /// SpinLock spinLock; + private int closeRequested; + /// - /// + /// GarnetTcpNetworkSender Constructor /// /// - /// + /// /// public GarnetTcpNetworkSender( Socket socket, + NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, int throttleMax = 8) - : base(networkPool.MinAllocationSize) + : base(networkBufferSettings.sendBufferSize) { + this.networkBufferSettings = networkBufferSettings; this.networkPool = networkPool; this.socket = socket; this.saeaStack = new(2 * ThrottleMax); this.responseObject = null; 
this.ThrottleMax = throttleMax; this.spinLock = new(); + this.closeRequested = 0; - var endpoint = socket.RemoteEndPoint as IPEndPoint; - if (endpoint != null) - remoteEndpoint = $"{endpoint.Address}:{endpoint.Port}"; - else - remoteEndpoint = ""; + remoteEndpoint = socket.RemoteEndPoint is IPEndPoint remote ? $"{remote.Address}:{remote.Port}" : ""; + localEndpoint = socket.LocalEndPoint is IPEndPoint local ? $"{local.Address}:{local.Port}" : ""; } /// public override string RemoteEndpointName => remoteEndpoint; + /// + public override string LocalEndpointName => localEndpoint; + /// public override void Enter() { @@ -104,7 +111,7 @@ public override unsafe void EnterAndGetResponseObject(out byte* head, out byte* { if (disposed) ThrowDisposed(); - responseObject = new GarnetSaeaBuffer(SeaaBuffer_Completed, networkPool); + responseObject = new GarnetSaeaBuffer(SeaaBuffer_Completed, networkBufferSettings, networkPool); } head = responseObject.buffer.entryPtr; tail = responseObject.buffer.entryPtr + responseObject.buffer.entry.Length; @@ -138,7 +145,7 @@ public override void GetResponseObject() { if (disposed) ThrowDisposed(); - responseObject = new GarnetSaeaBuffer(SeaaBuffer_Completed, networkPool); + responseObject = new GarnetSaeaBuffer(SeaaBuffer_Completed, networkBufferSettings, networkPool); } } } @@ -257,6 +264,31 @@ public override void Throttle() throttle.Release(); } + /// + public override bool TryClose() + { + // Only one caller gets to invoke Close, as we'd expect subsequent ones to fail and throw + if (Interlocked.CompareExchange(ref closeRequested, 1, 0) != 0) + { + return false; + } + + try + { + // This close should cause all outstanding requests to fail. + // + // We don't distinguish between clients closing their end of the Socket + // and us forcing it closed on request. 
+ socket.Close(); + } + catch + { + // Best effort, just swallow any exceptions + } + + return true; + } + [MethodImpl(MethodImplOptions.AggressiveInlining)] private unsafe void Send(Socket socket, GarnetSaeaBuffer sendObject, int offset, int size) { diff --git a/libs/common/Networking/INetworkSender.cs b/libs/common/Networking/INetworkSender.cs index c38eb9720f..0d72610213 100644 --- a/libs/common/Networking/INetworkSender.cs +++ b/libs/common/Networking/INetworkSender.cs @@ -21,6 +21,11 @@ public interface INetworkSender : IDisposable /// string RemoteEndpointName { get; } + /// + /// Local endpoint name + /// + string LocalEndpointName { get; } + /// /// Enter exclusive use of network sender. /// @@ -92,5 +97,12 @@ public interface INetworkSender : IDisposable /// Throttle mechanism for preventing too many sends outstanding (blocking) /// void Throttle(); + + /// + /// Forcibly close the underlying network connection. + /// + /// Returns true if the caller is the first to successfully call this method. 
+ /// + bool TryClose(); } } \ No newline at end of file diff --git a/libs/common/Networking/NetworkHandler.cs b/libs/common/Networking/NetworkHandler.cs index c17cb5c198..a0481b6591 100644 --- a/libs/common/Networking/NetworkHandler.cs +++ b/libs/common/Networking/NetworkHandler.cs @@ -26,7 +26,12 @@ public abstract partial class NetworkHandler : Netw protected readonly TServerHook serverHook; /// - /// Network buffer pool + /// Network buffer settings used to allocate send and receive buffers + /// + protected readonly NetworkBufferSettings networkBufferSettings; + + /// + /// Network pool used to allocated send and receive buffers /// protected readonly LimitedFixedBufferPool networkPool; @@ -52,8 +57,6 @@ public abstract partial class NetworkHandler : Netw /// protected int networkBytesRead, networkReadHead; - - /// /// Buffer that application reads data from /// @@ -98,7 +101,7 @@ public abstract partial class NetworkHandler : Netw /// /// Constructor /// - public unsafe NetworkHandler(TServerHook serverHook, TNetworkSender networkSender, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer = null, ILogger logger = null) + public unsafe NetworkHandler(TServerHook serverHook, TNetworkSender networkSender, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer = null, ILogger logger = null) : base(networkPool.MinAllocationSize) { this.logger = logger; @@ -106,6 +109,7 @@ public unsafe NetworkHandler(TServerHook serverHook, TNetworkSender networkSende this.networkSender = networkSender; this.session = messageConsumer; this.readerStatus = TlsReaderStatus.Rest; + this.networkBufferSettings = networkBufferSettings; this.networkPool = networkPool; if (!useTLS) @@ -125,11 +129,11 @@ public unsafe NetworkHandler(TServerHook serverHook, TNetworkSender networkSende expectingData = new SemaphoreSlim(0); cancellationTokenSource = new(); - transportReceiveBufferEntry = 
networkPool.Get(networkPool.MinAllocationSize); + transportReceiveBufferEntry = this.networkPool.Get(this.networkBufferSettings.initialReceiveBufferSize); transportReceiveBuffer = transportReceiveBufferEntry.entry; transportReceiveBufferPtr = transportReceiveBufferEntry.entryPtr; - transportSendBufferEntry = networkPool.Get(networkPool.MinAllocationSize); + transportSendBufferEntry = this.networkPool.Get(this.networkBufferSettings.sendBufferSize); transportSendBuffer = transportSendBufferEntry.entry; transportSendBufferPtr = transportSendBufferEntry.entryPtr; } diff --git a/libs/common/Networking/NetworkSenderBase.cs b/libs/common/Networking/NetworkSenderBase.cs index dbb1227cfb..1971ce458e 100644 --- a/libs/common/Networking/NetworkSenderBase.cs +++ b/libs/common/Networking/NetworkSenderBase.cs @@ -46,6 +46,9 @@ public NetworkSenderBase(int serverBufferSize) /// public abstract string RemoteEndpointName { get; } + /// + public abstract string LocalEndpointName { get; } + /// public abstract void Enter(); @@ -87,5 +90,8 @@ public NetworkSenderBase(int serverBufferSize) /// public abstract void Throttle(); + + /// + public abstract bool TryClose(); } } \ No newline at end of file diff --git a/libs/common/Networking/TcpNetworkHandler.cs b/libs/common/Networking/TcpNetworkHandler.cs index 874a607e44..65469d82fa 100644 --- a/libs/common/Networking/TcpNetworkHandler.cs +++ b/libs/common/Networking/TcpNetworkHandler.cs @@ -17,8 +17,8 @@ public abstract class TcpNetworkHandler : TcpNetworkHandlerBase /// Constructor /// - public TcpNetworkHandler(TServerHook serverHook, Socket socket, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer = null, int networkSendThrottleMax = 8, ILogger logger = null) - : base(serverHook, new GarnetTcpNetworkSender(socket, networkPool, networkSendThrottleMax), socket, networkPool, useTLS, messageConsumer, logger) + public TcpNetworkHandler(TServerHook serverHook, Socket socket, NetworkBufferSettings 
networkBufferSettings, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer = null, int networkSendThrottleMax = 8, ILogger logger = null) + : base(serverHook, new GarnetTcpNetworkSender(socket, networkBufferSettings, networkPool, networkSendThrottleMax), socket, networkBufferSettings, networkPool, useTLS, messageConsumer: messageConsumer, logger: logger) { } } diff --git a/libs/common/Networking/TcpNetworkHandlerBase.cs b/libs/common/Networking/TcpNetworkHandlerBase.cs index e5bcb5e7c3..8f2acf802a 100644 --- a/libs/common/Networking/TcpNetworkHandlerBase.cs +++ b/libs/common/Networking/TcpNetworkHandlerBase.cs @@ -24,26 +24,31 @@ public abstract class TcpNetworkHandlerBase : Netwo readonly ILogger logger; readonly Socket socket; readonly string remoteEndpoint; + readonly string localEndpoint; + int closeRequested; /// /// Constructor /// - public TcpNetworkHandlerBase(TServerHook serverHook, TNetworkSender networkSender, Socket socket, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer = null, ILogger logger = null) - : base(serverHook, networkSender, networkPool, useTLS, messageConsumer, logger) + public TcpNetworkHandlerBase(TServerHook serverHook, TNetworkSender networkSender, Socket socket, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, bool useTLS, IMessageConsumer messageConsumer = null, ILogger logger = null) + : base(serverHook, networkSender, networkBufferSettings, networkPool, useTLS, messageConsumer: messageConsumer, logger: logger) { this.logger = logger; this.socket = socket; - var endpoint = socket.RemoteEndPoint as IPEndPoint; - if (endpoint != null) - remoteEndpoint = $"{endpoint.Address}:{endpoint.Port}"; - else - remoteEndpoint = ""; + this.closeRequested = 0; + + remoteEndpoint = socket.RemoteEndPoint is IPEndPoint remote ? $"{remote.Address}:{remote.Port}" : ""; + localEndpoint = socket.LocalEndPoint is IPEndPoint local ? 
$"{local.Address}:{local.Port}" : ""; + AllocateNetworkReceiveBuffer(); } /// public override string RemoteEndpointName => remoteEndpoint; + /// + public override string LocalEndpointName => localEndpoint; + /// public override void Start(SslServerAuthenticationOptions tlsOptions = null, string remoteEndpointName = null, CancellationToken token = default) { @@ -72,6 +77,31 @@ public override async Task StartAsync(SslClientAuthenticationOptions tlsOptions, await base.StartAsync(tlsOptions, remoteEndpointName, token).ConfigureAwait(false); } + /// + public override bool TryClose() + { + // Only one caller gets to invoke Close, as we'd expect subsequent ones to fail and throw + if (Interlocked.CompareExchange(ref closeRequested, 0, 1) != 0) + { + return false; + } + + try + { + // This close should cause all outstanding requests to fail. + // + // We don't distinguish between clients closing their end of the Socket + // and us forcing it closed on request. + socket.Close(); + } + catch + { + // Best effort, just swallow any exceptions + } + + return true; + } + void Start() { var receiveEventArgs = new SocketAsyncEventArgs { AcceptSocket = socket }; @@ -91,7 +121,6 @@ void Start() } } - /// public override void Dispose() { @@ -133,7 +162,7 @@ void RecvEventArg_Completed(object sender, SocketAsyncEventArgs e) unsafe void AllocateNetworkReceiveBuffer() { - networkReceiveBufferEntry = networkPool.Get(networkPool.MinAllocationSize); + networkReceiveBufferEntry = networkPool.Get(networkBufferSettings.initialReceiveBufferSize); networkReceiveBuffer = networkReceiveBufferEntry.entry; networkReceiveBufferPtr = networkReceiveBufferEntry.entryPtr; } diff --git a/libs/common/RespReadUtils.cs b/libs/common/RespReadUtils.cs index 3d8cf468c8..2ae39f723a 100644 --- a/libs/common/RespReadUtils.cs +++ b/libs/common/RespReadUtils.cs @@ -856,7 +856,7 @@ public static bool ReadStringArrayResponseWithLengthHeader(out string[] result, { if (*ptr == '$') { - if 
(!ReadStringWithLengthHeader(out result[i], ref ptr, end)) + if (!ReadStringResponseWithLengthHeader(out result[i], ref ptr, end)) return false; } else if (*ptr == '+') diff --git a/libs/common/RespWriteUtils.cs b/libs/common/RespWriteUtils.cs index 3650823d6c..0264d668fd 100644 --- a/libs/common/RespWriteUtils.cs +++ b/libs/common/RespWriteUtils.cs @@ -78,6 +78,18 @@ public static bool WriteArrayLength(int len, ref byte* curr, byte* end) return true; } + public static bool WriteArrayLength(int len, ref byte* curr, byte* end, out int numDigits, out int totalLen) + { + numDigits = NumUtils.NumDigits(len); + totalLen = 1 + numDigits + 2; + if (totalLen > (int)(end - curr)) + return false; + *curr++ = (byte)'*'; + NumUtils.IntToBytes(len, numDigits, ref curr); + WriteNewline(ref curr); + return true; + } + /// /// Write array item /// diff --git a/libs/common/StreamProvider.cs b/libs/common/StreamProvider.cs index 05b145b7b9..2df86abd4f 100644 --- a/libs/common/StreamProvider.cs +++ b/libs/common/StreamProvider.cs @@ -119,7 +119,7 @@ public class StreamProviderFactory /// /// Type of location of files the stream provider reads from / writes to /// Connection string to Azure Storage, if applicable - /// Assembly from which to load the embedded resource, if applicable + /// Assembly from which to load the embedded resource, if applicable. 
/// StreamProvider instance public static IStreamProvider GetStreamProvider(FileLocationType locationType, string connectionString = null, Assembly resourceAssembly = null) { @@ -127,11 +127,14 @@ public static IStreamProvider GetStreamProvider(FileLocationType locationType, s { case FileLocationType.AzureStorage: if (string.IsNullOrEmpty(connectionString)) - throw new ArgumentException("Azure Storage connection string is required to read/write settings to Azure Storage", nameof(connectionString)); + throw new ArgumentException("Azure Storage connection string is required to read/write to Azure Storage", nameof(connectionString)); return new AzureStreamProvider(connectionString); case FileLocationType.Local: return new LocalFileStreamProvider(); case FileLocationType.EmbeddedResource: + if (resourceAssembly == null) + throw new ArgumentException( + "Assembly is required to read from embedded resource", nameof(resourceAssembly)); return new EmbeddedResourceStreamProvider(resourceAssembly); default: throw new NotImplementedException(); diff --git a/libs/host/Configuration/Options.cs b/libs/host/Configuration/Options.cs index 85c7b62782..1bae3a7d84 100644 --- a/libs/host/Configuration/Options.cs +++ b/libs/host/Configuration/Options.cs @@ -52,7 +52,7 @@ internal sealed class Options public string SegmentSize { get; set; } [MemorySizeValidation] - [Option('i', "index", Required = false, HelpText = "Size of hash index in bytes (rounds down to power of 2)")] + [Option('i', "index", Required = false, HelpText = "Start size of hash index in bytes (rounds down to power of 2)")] public string IndexSize { get; set; } [MemorySizeValidation(false)] @@ -64,11 +64,11 @@ internal sealed class Options public int MutablePercent { get; set; } [MemorySizeValidation(false)] - [Option("obj-total-memory", Required = false, HelpText = "Total object store log memory used including heap memory in bytes")] - public string ObjectStoreTotalMemorySize { get; set; } + [Option("obj-heap-memory", 
Required = false, HelpText = "Object store heap memory size in bytes (Sum of size taken up by all object instances in the heap)")] + public string ObjectStoreHeapMemorySize { get; set; } [MemorySizeValidation] - [Option("obj-memory", Required = false, HelpText = "Object store log memory used in bytes excluding heap memory")] + [Option("obj-log-memory", Required = false, HelpText = "Object store log memory used in bytes (Size of only the log with references to heap objects, excludes size of heap memory consumed by the objects themselves referred to from the log)")] public string ObjectStoreLogMemorySize { get; set; } [MemorySizeValidation] @@ -80,7 +80,7 @@ internal sealed class Options public string ObjectStoreSegmentSize { get; set; } [MemorySizeValidation] - [Option("obj-index", Required = false, HelpText = "Size of object store hash index in bytes (rounds down to power of 2)")] + [Option("obj-index", Required = false, HelpText = "Start size of object store hash index in bytes (rounds down to power of 2)")] public string ObjectStoreIndexSize { get; set; } [MemorySizeValidation(false)] @@ -139,7 +139,7 @@ internal sealed class Options [Option("clean-cluster-config", Required = false, HelpText = "Start with clean cluster config.")] public bool? CleanClusterConfig { get; set; } - [Option("auth", Required = false, HelpText = "Authentication mode of Garnet. This impacts how AUTH command is processed and how clients are authenticated against Garnet. Value options: NoAuth, Password, Aad, ACL")] + [Option("auth", Required = false, Default = GarnetAuthenticationMode.ACL, HelpText = "Authentication mode of Garnet. This impacts how AUTH command is processed and how clients are authenticated against Garnet. 
Value options: NoAuth, Password, Aad, ACL")] public GarnetAuthenticationMode AuthenticationMode { get; set; } [Option("password", Required = false, HelpText = "Authentication string for password authentication.")] @@ -194,6 +194,14 @@ internal sealed class Options [Option("aof-size-limit", Required = false, HelpText = "Maximum size of AOF (rounds down to power of 2) after which unsafe truncation will be applied. Left empty AOF will grow without bound unless a checkpoint is taken")] public string AofSizeLimit { get; set; } + [IntRangeValidation(0, int.MaxValue)] + [Option("aof-refresh-freq", Required = false, HelpText = "AOF replication (safe tail address) refresh frequency in milliseconds. 0 = auto refresh after every enqueue.")] + public int AofReplicationRefreshFrequencyMs { get; set; } + + [IntRangeValidation(0, int.MaxValue)] + [Option("subscriber-refresh-freq", Required = false, HelpText = "Subscriber (safe tail address) refresh frequency in milliseconds (for pub-sub). 0 = auto refresh after every enqueue.")] + public int SubscriberRefreshFrequencyMs { get; set; } + [IntRangeValidation(0, int.MaxValue)] [Option("compaction-freq", Required = false, HelpText = "Background hybrid log compaction frequency in seconds. 0 = disabled (compaction performed before checkpointing instead)")] public int CompactionFrequencySecs { get; set; } @@ -437,6 +445,10 @@ internal sealed class Options [Option("extension-bin-paths", Separator = ',', Required = false, HelpText = "List of directories on server from which custom command binaries can be loaded by admin users")] public IEnumerable ExtensionBinPaths { get; set; } + [ModuleFilePathValidation(true, true, false)] + [Option("loadmodulecs", Separator = ',', Required = false, HelpText = "List of modules to be loaded")] + public IEnumerable LoadModuleCS { get; set; } + [Option("extension-allow-unsigned", Required = false, HelpText = "Allow loading custom commands from digitally unsigned assemblies (not recommended)")] public bool? 
ExtensionAllowUnsignedAssemblies { get; set; } @@ -563,7 +575,7 @@ public GarnetServerOptions GetServerOptions(ILogger logger = null) IndexSize = IndexSize, IndexMaxSize = IndexMaxSize, MutablePercent = MutablePercent, - ObjectStoreTotalMemorySize = ObjectStoreTotalMemorySize, + ObjectStoreHeapMemorySize = ObjectStoreHeapMemorySize, ObjectStoreLogMemorySize = ObjectStoreLogMemorySize, ObjectStorePageSize = ObjectStorePageSize, ObjectStoreSegmentSize = ObjectStoreSegmentSize, @@ -588,6 +600,7 @@ public GarnetServerOptions GetServerOptions(ILogger logger = null) LuaTransactionMode = LuaTransactionMode.GetValueOrDefault(), AofMemorySize = AofMemorySize, AofPageSize = AofPageSize, + AofReplicationRefreshFrequencyMs = AofReplicationRefreshFrequencyMs, CommitFrequencyMs = CommitFrequencyMs, WaitForCommit = WaitForCommit.GetValueOrDefault(), AofSizeLimit = AofSizeLimit, @@ -644,7 +657,8 @@ public GarnetServerOptions GetServerOptions(ILogger logger = null) ExtensionBinPaths = ExtensionBinPaths?.ToArray(), ExtensionAllowUnsignedAssemblies = ExtensionAllowUnsignedAssemblies.GetValueOrDefault(), IndexResizeFrequencySecs = IndexResizeFrequencySecs, - IndexResizeThreshold = IndexResizeThreshold + IndexResizeThreshold = IndexResizeThreshold, + LoadModuleCS = LoadModuleCS }; } diff --git a/libs/host/Configuration/OptionsValidators.cs b/libs/host/Configuration/OptionsValidators.cs index 64cc09a9d2..ea7f64682e 100644 --- a/libs/host/Configuration/OptionsValidators.cs +++ b/libs/host/Configuration/OptionsValidators.cs @@ -277,6 +277,41 @@ protected override ValidationResult IsValid(object value, ValidationContext vali } } + [AttributeUsage(AttributeTargets.Property)] + internal class ModuleFilePathValidationAttribute : FilePathValidationAttribute + { + internal ModuleFilePathValidationAttribute(bool fileMustExist, bool directoryMustExist, bool isRequired, string[] acceptedFileExtensions = null) : base(fileMustExist, directoryMustExist, isRequired, acceptedFileExtensions) + { + } + + 
protected override ValidationResult IsValid(object value, ValidationContext validationContext) + { + if (TryInitialValidation>(value, validationContext, out var initValidationResult, out var filePaths)) + return initValidationResult; + + var errorSb = new StringBuilder(); + var isValid = true; + foreach (var filePathArg in filePaths) + { + var filePath = filePathArg.Split(' ')[0]; + var result = base.IsValid(filePath, validationContext); + if (result != null && result != ValidationResult.Success) + { + isValid = false; + errorSb.AppendLine(result.ErrorMessage); + } + } + + if (!isValid) + { + var errorMessage = $"Error(s) validating one or more file paths:{Environment.NewLine}{errorSb}"; + return new ValidationResult(errorMessage, [validationContext.MemberName]); + } + + return ValidationResult.Success; + } + } + /// /// Validation logic for a string representing an IP address (either IPv4 or IPv6) /// diff --git a/libs/host/GarnetServer.cs b/libs/host/GarnetServer.cs index 6af117b109..ce81c77b10 100644 --- a/libs/host/GarnetServer.cs +++ b/libs/host/GarnetServer.cs @@ -4,11 +4,14 @@ using System; using System.Diagnostics; using System.IO; +using System.Linq; +using System.Text; using System.Threading; using Garnet.cluster; using Garnet.common; using Garnet.networking; using Garnet.server; +using Garnet.server.Auth.Settings; using Microsoft.Extensions.Logging; using Tsavorite.core; @@ -34,7 +37,6 @@ public class GarnetServer : IDisposable private IDevice aofDevice; private TsavoriteLog appendOnlyFile; private SubscribeBroker> subscribeBroker; - private CollectionItemBroker itemBroker; private KVSettings kvSettings; private KVSettings objKvSettings; private INamedDeviceFactory logFactory; @@ -50,7 +52,7 @@ public class GarnetServer : IDisposable protected StoreWrapper storeWrapper; // IMPORTANT: Keep the version in sync with .azure\pipelines\azure-pipelines-external-release.yml line ~6. 
- readonly string version = "1.0.19"; + readonly string version = "1.0.30"; /// /// Resp protocol version @@ -77,7 +79,9 @@ public class GarnetServer : IDisposable /// /// Command line arguments /// Logger factory - public GarnetServer(string[] commandLineArgs, ILoggerFactory loggerFactory = null, bool cleanupDir = false) + /// Clean up directory. + /// Override for custom authentication settings. + public GarnetServer(string[] commandLineArgs, ILoggerFactory loggerFactory = null, bool cleanupDir = false, IAuthenticationSettings authenticationSettingsOverride = null) { Trace.Listeners.Add(new ConsoleTraceListener()); @@ -126,6 +130,7 @@ public GarnetServer(string[] commandLineArgs, ILoggerFactory loggerFactory = nul // Assign values to GarnetServerOptions this.opts = serverSettings.GetServerOptions(this.loggerFactory.CreateLogger("Options")); + this.opts.AuthSettings = authenticationSettingsOverride ?? this.opts.AuthSettings; this.cleanupDir = cleanupDir; this.InitializeServer(); } @@ -184,15 +189,23 @@ private void InitializeServer() throw new Exception($"Unable to call ThreadPool.SetMaxThreads with {opts.ThreadPoolMaxThreads}"); CreateMainStore(clusterFactory, out var checkpointDir); - CreateObjectStore(clusterFactory, customCommandManager, checkpointDir, out var objectStoreSizeTracker, out itemBroker); + CreateObjectStore(clusterFactory, customCommandManager, checkpointDir, out var objectStoreSizeTracker); if (!opts.DisablePubSub) - subscribeBroker = new SubscribeBroker>(new SpanByteKeySerializer(), null, opts.PubSubPageSizeBytes(), true); + subscribeBroker = new SubscribeBroker>(new SpanByteKeySerializer(), null, opts.PubSubPageSizeBytes(), opts.SubscriberRefreshFrequencyMs, true); CreateAOF(); logger?.LogTrace("TLS is {tlsEnabled}", opts.TlsOptions == null ? "disabled" : "enabled"); + if (logger != null) + { + var configMemoryLimit = (store.IndexSize * 64) + store.Log.MaxMemorySizeBytes + (store.ReadCache?.MaxMemorySizeBytes ?? 
0) + (appendOnlyFile?.MaxMemorySizeBytes ?? 0); + if (objectStore != null) + configMemoryLimit += objectStore.IndexSize * 64 + objectStore.Log.MaxMemorySizeBytes + (objectStore.ReadCache?.MaxMemorySizeBytes ?? 0) + (objectStoreSizeTracker?.TargetSize ?? 0); + logger.LogInformation("Total configured memory limit: {configMemoryLimit}", configMemoryLimit); + } + // Create Garnet TCP server if none was provided. this.server ??= new GarnetServerTcp(opts.Address, opts.Port, 0, opts.TlsOptions, opts.NetworkSendThrottleMax, logger); @@ -200,7 +213,7 @@ private void InitializeServer() customCommandManager, appendOnlyFile, opts, clusterFactory: clusterFactory, loggerFactory: loggerFactory); // Create session provider for Garnet - Provider = new GarnetProvider(storeWrapper, subscribeBroker, itemBroker); + Provider = new GarnetProvider(storeWrapper, subscribeBroker); // Create user facing API endpoints Metrics = new MetricsApi(Provider); @@ -208,6 +221,32 @@ private void InitializeServer() Store = new StoreApi(storeWrapper); server.Register(WireFormat.ASCII, Provider); + + LoadModules(customCommandManager); + } + + private void LoadModules(CustomCommandManager customCommandManager) + { + if (opts.LoadModuleCS == null) + return; + + foreach (var moduleCS in opts.LoadModuleCS) + { + var moduleCSData = moduleCS.Split(' ', StringSplitOptions.RemoveEmptyEntries); + if (moduleCSData.Length < 1) + continue; + + var modulePath = moduleCSData[0]; + var moduleArgs = moduleCSData.Length > 1 ? 
moduleCSData.Skip(1).ToArray() : []; + if (ModuleUtils.LoadAssemblies([modulePath], null, true, out var loadedAssemblies, out var errorMsg)) + { + ModuleRegistrar.Instance.LoadModule(customCommandManager, loadedAssemblies.ToList()[0], moduleArgs, logger, out errorMsg); + } + else + { + logger?.LogError("Module {0} failed to load with error {1}", modulePath, Encoding.UTF8.GetString(errorMsg)); + } + } } private void CreateMainStore(IClusterFactory clusterFactory, out string checkpointDir) @@ -237,33 +276,37 @@ private void CreateMainStore(IClusterFactory clusterFactory, out string checkpoi , (allocatorSettings, storeFunctions) => new(allocatorSettings, storeFunctions)); } - private void CreateObjectStore(IClusterFactory clusterFactory, CustomCommandManager customCommandManager, string CheckpointDir, out CacheSizeTracker objectStoreSizeTracker, out CollectionItemBroker itemBroker) + private void CreateObjectStore(IClusterFactory clusterFactory, CustomCommandManager customCommandManager, string CheckpointDir, out CacheSizeTracker objectStoreSizeTracker) { objectStoreSizeTracker = null; - itemBroker = null; if (!opts.DisableObjects) { - objKvSettings = opts.GetObjectStoreSettings(this.loggerFactory?.CreateLogger("TsavoriteKV [obj]"), out var objTotalMemorySize); + objKvSettings = opts.GetObjectStoreSettings(this.loggerFactory?.CreateLogger("TsavoriteKV [obj]"), + out var objHeapMemorySize); // Run checkpoint on its own thread to control p99 objKvSettings.ThrottleCheckpointFlushDelayMs = opts.CheckpointThrottleFlushDelayMs; objKvSettings.CheckpointVersionSwitchBarrier = opts.EnableCluster; if (opts.EnableCluster) - objKvSettings.CheckpointManager = clusterFactory.CreateCheckpointManager(opts.DeviceFactoryCreator(), - new DefaultCheckpointNamingScheme(CheckpointDir + "/ObjectStore/checkpoints"), isMainStore: false, logger); + objKvSettings.CheckpointManager = clusterFactory.CreateCheckpointManager( + opts.DeviceFactoryCreator(), + new 
DefaultCheckpointNamingScheme(CheckpointDir + "/ObjectStore/checkpoints"), + isMainStore: false, logger); else objKvSettings.CheckpointManager = new DeviceLogCommitCheckpointManager(opts.DeviceFactoryCreator(), - new DefaultCheckpointNamingScheme(CheckpointDir + "/ObjectStore/checkpoints"), removeOutdated: true); + new DefaultCheckpointNamingScheme(CheckpointDir + "/ObjectStore/checkpoints"), + removeOutdated: true); objectStore = new(objKvSettings - , StoreFunctions.Create(new ByteArrayKeyComparer(), () => new ByteArrayBinaryObjectSerializer(), () => new GarnetObjectSerializer(customCommandManager)) + , StoreFunctions.Create(new ByteArrayKeyComparer(), + () => new ByteArrayBinaryObjectSerializer(), + () => new GarnetObjectSerializer(customCommandManager)) , (allocatorSettings, storeFunctions) => new(allocatorSettings, storeFunctions)); - if (objTotalMemorySize > 0) - objectStoreSizeTracker = new CacheSizeTracker(objectStore, objKvSettings, objTotalMemorySize, this.loggerFactory); - - itemBroker = new CollectionItemBroker(); + if (objHeapMemorySize > 0) + objectStoreSizeTracker = new CacheSizeTracker(objectStore, objKvSettings, objHeapMemorySize, + this.loggerFactory); } } @@ -331,7 +374,6 @@ private void InternalDispose() Provider?.Dispose(); server.Dispose(); subscribeBroker?.Dispose(); - itemBroker?.Dispose(); store.Dispose(); appendOnlyFile?.Dispose(); aofDevice?.Dispose(); diff --git a/libs/host/defaults.conf b/libs/host/defaults.conf index 97b15df51a..27e700c966 100644 --- a/libs/host/defaults.conf +++ b/libs/host/defaults.conf @@ -18,7 +18,7 @@ /* Size of each log segment in bytes on disk (rounds down to power of 2) */ "SegmentSize" : "1g", - /* Size of hash index in bytes (rounds down to power of 2) */ + /* Start size of hash index in bytes (rounds down to power of 2) */ "IndexSize" : "128m", /* Max size of hash index in bytes (rounds down to power of 2) */ @@ -27,10 +27,10 @@ /* Percentage of log memory that is kept mutable */ "MutablePercent" : 90, - /* 
Total object store log memory used including heap memory in bytes */ - "ObjectStoreTotalMemorySize" : "", + /* Object store heap memory size in bytes (Sum of size taken up by all object instances in the heap) */ + "ObjectStoreHeapMemorySize" : "", - /* Object store log memory used in bytes excluding heap memory */ + /* Object store log memory used in bytes (Size of only the log with references to heap objects, excludes size of heap memory consumed by the objects themselves referred to from the log) */ "ObjectStoreLogMemorySize" : "32m", /* Size of each object store page in bytes (rounds down to power of 2) */ @@ -39,7 +39,7 @@ /* Size of each object store log segment in bytes on disk (rounds down to power of 2) */ "ObjectStoreSegmentSize" : "32m", - /* Size of object store hash index in bytes (rounds down to power of 2) */ + /* Start size of object store hash index in bytes (rounds down to power of 2) */ "ObjectStoreIndexSize" : "16m", /* Max size of object store hash index in bytes (rounds down to power of 2) */ @@ -123,6 +123,12 @@ /* Size of each AOF page in bytes(rounds down to power of 2) */ "AofPageSize" : "4m", + /* AOF replication (safe tail address) refresh frequency in milliseconds. 0 = auto refresh after every enqueue. */ + "AofReplicationRefreshFrequencyMs": 10, + + /* Subscriber (safe tail address) refresh frequency in milliseconds (for pub-sub). 0 = auto refresh after every enqueue. */ + "SubscriberRefreshFrequencyMs": 0, + /* Write ahead logging (append-only file) commit issue frequency in milliseconds. 
0 = issue an immediate commit per operation, -1 = manually issue commits using COMMITAOF command */ "CommitFrequencyMs" : 0, @@ -298,4 +304,7 @@ /* Overflow bucket count over total index size in percentage to trigger index resize */ "IndexResizeThreshold": 50, + + /* List of module paths to be loaded at startup */ + "LoadModuleCS": null } \ No newline at end of file diff --git a/libs/resources/Garnet.resources.csproj b/libs/resources/Garnet.resources.csproj new file mode 100644 index 0000000000..f09ccdcb93 --- /dev/null +++ b/libs/resources/Garnet.resources.csproj @@ -0,0 +1,19 @@ + + + + net8.0 + enable + enable + + + + + + + + + + + + + diff --git a/libs/resources/RespCommandsDocs.json b/libs/resources/RespCommandsDocs.json new file mode 100644 index 0000000000..5d1e136100 --- /dev/null +++ b/libs/resources/RespCommandsDocs.json @@ -0,0 +1,5788 @@ +[ + { + "Command": "ACL", + "Name": "ACL", + "Summary": "A container for Access List Control commands.", + "Group": "Server", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "ACL_USERS", + "Name": "ACL|USERS", + "Summary": "Lists all ACL users.", + "Group": "Server", + "Complexity": "O(N). Where N is the number of configured users." + }, + { + "Command": "ACL_SAVE", + "Name": "ACL|SAVE", + "Summary": "Saves the effective ACL rules in the configured ACL file.", + "Group": "Server", + "Complexity": "O(N). Where N is the number of configured users." + }, + { + "Command": "ACL_LIST", + "Name": "ACL|LIST", + "Summary": "Dumps the effective rules in ACL file format.", + "Group": "Server", + "Complexity": "O(N). Where N is the number of configured users." 
+ }, + { + "Command": "ACL_CAT", + "Name": "ACL|CAT", + "Summary": "Lists the ACL categories, or the commands inside a category.", + "Group": "Server", + "Complexity": "O(1) since the categories and commands are a fixed set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CATEGORY", + "DisplayText": "category", + "Type": "String", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ACL_LOAD", + "Name": "ACL|LOAD", + "Summary": "Reloads the rules from the configured ACL file.", + "Group": "Server", + "Complexity": "O(N). Where N is the number of configured users." + }, + { + "Command": "ACL_WHOAMI", + "Name": "ACL|WHOAMI", + "Summary": "Returns the authenticated username of the current connection.", + "Group": "Server", + "Complexity": "O(1)" + }, + { + "Command": "ACL_DELUSER", + "Name": "ACL|DELUSER", + "Summary": "Deletes ACL users, and terminates their connections.", + "Group": "Server", + "Complexity": "O(1) amortized time considering the typical user.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "USERNAME", + "DisplayText": "username", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "ACL_SETUSER", + "Name": "ACL|SETUSER", + "Summary": "Creates and modifies an ACL user and its rules.", + "Group": "Server", + "Complexity": "O(N). Where N is the number of rules provided.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "USERNAME", + "DisplayText": "username", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RULE", + "DisplayText": "rule", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + } + ] + }, + { + "Command": "APPEND", + "Name": "APPEND", + "Summary": "Appends a string to the value of a key. Creates the key if it doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1). 
The amortized time complexity is O(1) assuming the appended value is small and the already present value is of any size, since the dynamic string library used by Redis will double the free space available on every reallocation.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + }, + { + "Command": "ASKING", + "Name": "ASKING", + "Summary": "Signals that a cluster client is following an -ASK redirect.", + "Group": "Cluster", + "Complexity": "O(1)" + }, + { + "Command": "ASYNC", + "Name": "ASYNC", + "Summary": "Start, stop or issue a barrier command for a series of async operations.", + "Group": "Generic", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ARGS", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ON", + "DisplayText": "on", + "Type": "PureToken", + "Token": "ON" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFF", + "DisplayText": "off", + "Type": "PureToken", + "Token": "OFF" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BARRIER", + "DisplayText": "barrier", + "Type": "PureToken", + "Token": "BARRIER" + } + ] + } + ] + }, + { + "Command": "AUTH", + "Name": "AUTH", + "Summary": "Authenticates the connection.", + "Group": "Connection", + "Complexity": "O(N) where N is the number of passwords defined for the user", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "USERNAME", + "DisplayText": "username", + "Type": "String", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PASSWORD", + "DisplayText": "password", + "Type": "String" + } + ] + }, + { + "Command": "BGSAVE", + "Name": "BGSAVE", + "Summary": 
"Asynchronously saves the database(s) to disk.", + "Group": "Server", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SCHEDULE", + "DisplayText": "schedule", + "Type": "PureToken", + "Token": "SCHEDULE", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "BITCOUNT", + "Name": "BITCOUNT", + "Summary": "Counts the number of set bits (population counting) in a string.", + "Group": "Bitmap", + "Complexity": "O(N)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "RANGE", + "Type": "Block", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "END", + "DisplayText": "end", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "UNIT", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BYTE", + "DisplayText": "byte", + "Type": "PureToken", + "Token": "BYTE" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BIT", + "DisplayText": "bit", + "Type": "PureToken", + "Token": "BIT" + } + ] + } + ] + } + ] + }, + { + "Command": "BITFIELD", + "Name": "BITFIELD", + "Summary": "Performs arbitrary bitfield integer operations on strings.", + "Group": "Bitmap", + "Complexity": "O(1) for each subcommand specified", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "OPERATION", + "Type": "OneOf", + "ArgumentFlags": "Optional, Multiple", + "Arguments": [ + { + 
"TypeDiscriminator": "RespCommandContainerArgument", + "Name": "GET-BLOCK", + "Type": "Block", + "Token": "GET", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ENCODING", + "DisplayText": "encoding", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WRITE", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "OVERFLOW-BLOCK", + "Type": "OneOf", + "Token": "OVERFLOW", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WRAP", + "DisplayText": "wrap", + "Type": "PureToken", + "Token": "WRAP" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SAT", + "DisplayText": "sat", + "Type": "PureToken", + "Token": "SAT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FAIL", + "DisplayText": "fail", + "Type": "PureToken", + "Token": "FAIL" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WRITE-OPERATION", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "SET-BLOCK", + "Type": "Block", + "Token": "SET", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ENCODING", + "DisplayText": "encoding", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "Integer" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "INCRBY-BLOCK", + "Type": "Block", + "Token": "INCRBY", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ENCODING", + 
"DisplayText": "encoding", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INCREMENT", + "DisplayText": "increment", + "Type": "Integer" + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "Command": "BITFIELD_RO", + "Name": "BITFIELD_RO", + "Summary": "Performs arbitrary read-only bitfield integer operations on strings.", + "Group": "Bitmap", + "Complexity": "O(1) for each subcommand specified", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "GET-BLOCK", + "Type": "Block", + "Token": "GET", + "ArgumentFlags": "Optional, Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ENCODING", + "DisplayText": "encoding", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + } + ] + } + ] + }, + { + "Command": "BITOP", + "Name": "BITOP", + "Summary": "Performs bitwise operations on multiple strings, and stores the result.", + "Group": "Bitmap", + "Complexity": "O(N)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "OPERATION", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "AND", + "DisplayText": "and", + "Type": "PureToken", + "Token": "AND" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OR", + "DisplayText": "or", + "Type": "PureToken", + "Token": "OR" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "XOR", + "DisplayText": "xor", + "Type": "PureToken", + "Token": "XOR" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NOT", + "DisplayText": "not", 
+ "Type": "PureToken", + "Token": "NOT" + } + ] + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTKEY", + "DisplayText": "destkey", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "BITPOS", + "Name": "BITPOS", + "Summary": "Finds the first set (1) or clear (0) bit in a string.", + "Group": "Bitmap", + "Complexity": "O(N)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BIT", + "DisplayText": "bit", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "RANGE", + "Type": "Block", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "END-UNIT-BLOCK", + "Type": "Block", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "END", + "DisplayText": "end", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "UNIT", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BYTE", + "DisplayText": "byte", + "Type": "PureToken", + "Token": "BYTE" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BIT", + "DisplayText": "bit", + "Type": "PureToken", + "Token": "BIT" + } + ] + } + ] + } + ] + } + ] + }, + { + "Command": "BLMOVE", + "Name": "BLMOVE", + "Summary": "Pops an element from a list, pushes it to another list and returns it. Blocks until an element is available otherwise. 
Deletes the list if the last element was moved.", + "Group": "List", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "SOURCE", + "DisplayText": "source", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTINATION", + "DisplayText": "destination", + "Type": "Key", + "KeySpecIndex": 1 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WHEREFROM", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LEFT", + "DisplayText": "left", + "Type": "PureToken", + "Token": "LEFT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RIGHT", + "DisplayText": "right", + "Type": "PureToken", + "Token": "RIGHT" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WHERETO", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LEFT", + "DisplayText": "left", + "Type": "PureToken", + "Token": "LEFT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RIGHT", + "DisplayText": "right", + "Type": "PureToken", + "Token": "RIGHT" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TIMEOUT", + "DisplayText": "timeout", + "Type": "Double" + } + ] + }, + { + "Command": "BLPOP", + "Name": "BLPOP", + "Summary": "Removes and returns the first element in a list. Blocks until an element is available otherwise. 
Deletes the list if the last element was popped.", + "Group": "List", + "Complexity": "O(N) where N is the number of provided keys.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TIMEOUT", + "DisplayText": "timeout", + "Type": "Double" + } + ] + }, + { + "Command": "BRPOP", + "Name": "BRPOP", + "Summary": "Removes and returns the last element in a list. Blocks until an element is available otherwise. Deletes the list if the last element was popped.", + "Group": "List", + "Complexity": "O(N) where N is the number of provided keys.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TIMEOUT", + "DisplayText": "timeout", + "Type": "Double" + } + ] + }, + { + "Command": "CLIENT", + "Name": "CLIENT", + "Summary": "A container for client connection commands.", + "Group": "Connection", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "CLIENT_INFO", + "Name": "CLIENT|INFO", + "Summary": "Returns information about the connection.", + "Group": "Connection", + "Complexity": "O(1)" + }, + { + "Command": "CLIENT_KILL", + "Name": "CLIENT|KILL", + "Summary": "Terminates open connections.", + "Group": "Connection", + "Complexity": "O(N) where N is the number of client connections", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "FILTER", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OLD-FORMAT", + "DisplayText": "ip:port", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "NEW-FORMAT", + "Type": "OneOf", + 
"ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CLIENT-ID", + "DisplayText": "client-id", + "Type": "Integer", + "Token": "ID", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CLIENT-TYPE", + "Type": "OneOf", + "Token": "TYPE", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NORMAL", + "DisplayText": "normal", + "Type": "PureToken", + "Token": "NORMAL" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MASTER", + "DisplayText": "master", + "Type": "PureToken", + "Token": "MASTER" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SLAVE", + "DisplayText": "slave", + "Type": "PureToken", + "Token": "SLAVE" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "REPLICA", + "DisplayText": "replica", + "Type": "PureToken", + "Token": "REPLICA" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PUBSUB", + "DisplayText": "pubsub", + "Type": "PureToken", + "Token": "PUBSUB" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "USERNAME", + "DisplayText": "username", + "Type": "String", + "Token": "USER", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ADDR", + "DisplayText": "ip:port", + "Type": "String", + "Token": "ADDR", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LADDR", + "DisplayText": "ip:port", + "Type": "String", + "Token": "LADDR", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "SKIPME", + "Type": "OneOf", + "Token": "SKIPME", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "YES", + "DisplayText": "yes", + "Type": "PureToken", + "Token": "YES" + }, + { 
+ "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NO", + "DisplayText": "no", + "Type": "PureToken", + "Token": "NO" + } + ] + } + ] + } + ] + } + ] + }, + { + "Command": "CLIENT_ID", + "Name": "CLIENT|ID", + "Summary": "Returns the unique client ID of the connection.", + "Group": "Connection", + "Complexity": "O(1)" + }, + { + "Command": "CLIENT_LIST", + "Name": "CLIENT|LIST", + "Summary": "Lists open connections.", + "Group": "Connection", + "Complexity": "O(N) where N is the number of client connections", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CLIENT-TYPE", + "Type": "OneOf", + "Token": "TYPE", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NORMAL", + "DisplayText": "normal", + "Type": "PureToken", + "Token": "NORMAL" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MASTER", + "DisplayText": "master", + "Type": "PureToken", + "Token": "MASTER" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "REPLICA", + "DisplayText": "replica", + "Type": "PureToken", + "Token": "REPLICA" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PUBSUB", + "DisplayText": "pubsub", + "Type": "PureToken", + "Token": "PUBSUB" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CLIENT-ID", + "DisplayText": "client-id", + "Type": "Integer", + "Token": "ID", + "ArgumentFlags": "Optional, Multiple" + } + ] + } + ] + }, + { + "Command": "CLUSTER", + "Name": "CLUSTER", + "Summary": "A container for Redis Cluster commands.", + "Group": "Cluster", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "CLUSTER_NODES", + "Name": "CLUSTER|NODES", + "Summary": "Returns the cluster configuration for a node.", + "Group": "Cluster", + "Complexity": "O(N) where N is the total number of Cluster nodes" + }, + { + "Command": "CLUSTER_SETCONFIGEPOCH", + "Name": 
"CLUSTER|SET-CONFIG-EPOCH", + "Summary": "Sets the configuration epoch for a new node.", + "Group": "Cluster", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CONFIG-EPOCH", + "DisplayText": "config-epoch", + "Type": "Integer" + } + ] + }, + { + "Command": "CLUSTER_KEYSLOT", + "Name": "CLUSTER|KEYSLOT", + "Summary": "Returns the hash slot for a key.", + "Group": "Cluster", + "Complexity": "O(N) where N is the number of bytes in the key", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "String" + } + ] + }, + { + "Command": "CLUSTER_SLOTS", + "Name": "CLUSTER|SLOTS", + "Summary": "Returns the mapping of cluster slots to nodes.", + "Group": "Cluster", + "Complexity": "O(N) where N is the total number of Cluster nodes", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060CLUSTER SHARDS\u0060" + }, + { + "Command": "CLUSTER_DELSLOTSRANGE", + "Name": "CLUSTER|DELSLOTSRANGE", + "Summary": "Sets hash slot ranges as unbound for a node.", + "Group": "Cluster", + "Complexity": "O(N) where N is the total number of the slots between the start slot and end slot arguments.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "RANGE", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START-SLOT", + "DisplayText": "start-slot", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "END-SLOT", + "DisplayText": "end-slot", + "Type": "Integer" + } + ] + } + ] + }, + { + "Command": "CLUSTER_ADDSLOTS", + "Name": "CLUSTER|ADDSLOTS", + "Summary": "Assigns new hash slots to a node.", + "Group": "Cluster", + "Complexity": "O(N) where N is the total number of hash slot arguments", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SLOT", + "DisplayText": "slot", + "Type": 
"Integer", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "CLUSTER_REPLICATE", + "Name": "CLUSTER|REPLICATE", + "Summary": "Configure a node as replica of a master node.", + "Group": "Cluster", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NODE-ID", + "DisplayText": "node-id", + "Type": "String" + } + ] + }, + { + "Command": "CLUSTER_INFO", + "Name": "CLUSTER|INFO", + "Summary": "Returns information about the state of a node.", + "Group": "Cluster", + "Complexity": "O(1)" + }, + { + "Command": "CLUSTER_DELSLOTS", + "Name": "CLUSTER|DELSLOTS", + "Summary": "Sets hash slots as unbound for a node.", + "Group": "Cluster", + "Complexity": "O(N) where N is the total number of hash slot arguments", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SLOT", + "DisplayText": "slot", + "Type": "Integer", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "CLUSTER_ADDSLOTSRANGE", + "Name": "CLUSTER|ADDSLOTSRANGE", + "Summary": "Assigns new hash slot ranges to a node.", + "Group": "Cluster", + "Complexity": "O(N) where N is the total number of the slots between the start slot and end slot arguments.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "RANGE", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START-SLOT", + "DisplayText": "start-slot", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "END-SLOT", + "DisplayText": "end-slot", + "Type": "Integer" + } + ] + } + ] + }, + { + "Command": "CLUSTER_SHARDS", + "Name": "CLUSTER|SHARDS", + "Summary": "Returns the mapping of cluster slots to shards.", + "Group": "Cluster", + "Complexity": "O(N) where N is the total number of cluster nodes" + }, + { + "Command": "CLUSTER_FORGET", + "Name": "CLUSTER|FORGET", + "Summary": "Removes a node from the nodes 
table.", + "Group": "Cluster", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NODE-ID", + "DisplayText": "node-id", + "Type": "String" + } + ] + }, + { + "Command": "CLUSTER_BUMPEPOCH", + "Name": "CLUSTER|BUMPEPOCH", + "Summary": "Advances the cluster config epoch.", + "Group": "Cluster", + "Complexity": "O(1)" + }, + { + "Command": "CLUSTER_MYID", + "Name": "CLUSTER|MYID", + "Summary": "Returns the ID of a node.", + "Group": "Cluster", + "Complexity": "O(1)" + }, + { + "Command": "CLUSTER_COUNTKEYSINSLOT", + "Name": "CLUSTER|COUNTKEYSINSLOT", + "Summary": "Returns the number of keys in a hash slot.", + "Group": "Cluster", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SLOT", + "DisplayText": "slot", + "Type": "Integer" + } + ] + }, + { + "Command": "CLUSTER_MEET", + "Name": "CLUSTER|MEET", + "Summary": "Forces a node to handshake with another node.", + "Group": "Cluster", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "IP", + "DisplayText": "ip", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PORT", + "DisplayText": "port", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CLUSTER-BUS-PORT", + "DisplayText": "cluster-bus-port", + "Type": "Integer", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "CLUSTER_RESET", + "Name": "CLUSTER|RESET", + "Summary": "Resets a node.", + "Group": "Cluster", + "Complexity": "O(N) where N is the number of known nodes. 
The command may execute a FLUSHALL as a side effect.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "RESET-TYPE", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HARD", + "DisplayText": "hard", + "Type": "PureToken", + "Token": "HARD" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SOFT", + "DisplayText": "soft", + "Type": "PureToken", + "Token": "SOFT" + } + ] + } + ] + }, + { + "Command": "CLUSTER_GETKEYSINSLOT", + "Name": "CLUSTER|GETKEYSINSLOT", + "Summary": "Returns the key names in a hash slot.", + "Group": "Cluster", + "Complexity": "O(N) where N is the number of requested keys", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SLOT", + "DisplayText": "slot", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer" + } + ] + }, + { + "Command": "CLUSTER_SETSLOT", + "Name": "CLUSTER|SETSLOT", + "Summary": "Binds a hash slot to a node.", + "Group": "Cluster", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SLOT", + "DisplayText": "slot", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "SUBCOMMAND", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "IMPORTING", + "DisplayText": "node-id", + "Type": "String", + "Token": "IMPORTING" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MIGRATING", + "DisplayText": "node-id", + "Type": "String", + "Token": "MIGRATING" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NODE", + "DisplayText": "node-id", + "Type": "String", + "Token": "NODE" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "STABLE", + "DisplayText": "stable", + "Type": 
"PureToken", + "Token": "STABLE" + } + ] + } + ] + }, + { + "Command": "CLUSTER_HELP", + "Name": "CLUSTER|HELP", + "Summary": "Returns helpful text about the different subcommands.", + "Group": "Cluster", + "Complexity": "O(1)" + }, + { + "Command": "CLUSTER_REPLICAS", + "Name": "CLUSTER|REPLICAS", + "Summary": "Lists the replica nodes of a master node.", + "Group": "Cluster", + "Complexity": "O(N) where N is the number of replicas.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NODE-ID", + "DisplayText": "node-id", + "Type": "String" + } + ] + }, + { + "Command": "CLUSTER_FAILOVER", + "Name": "CLUSTER|FAILOVER", + "Summary": "Forces a replica to perform a manual failover of its master.", + "Group": "Cluster", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "OPTIONS", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FORCE", + "DisplayText": "force", + "Type": "PureToken", + "Token": "FORCE" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TAKEOVER", + "DisplayText": "takeover", + "Type": "PureToken", + "Token": "TAKEOVER" + } + ] + } + ] + } + ] + }, + { + "Command": "COMMAND", + "Name": "COMMAND", + "Summary": "Returns detailed information about all commands.", + "Group": "Server", + "Complexity": "O(N) where N is the total number of Redis commands", + "SubCommands": [ + { + "Command": "COMMAND_COUNT", + "Name": "COMMAND|COUNT", + "Summary": "Returns a count of commands.", + "Group": "Server", + "Complexity": "O(1)" + }, + { + "Command": "COMMAND_DOCS", + "Name": "COMMAND|DOCS", + "Summary": "Returns documentary information about one, multiple or all commands.", + "Group": "Server", + "Complexity": "O(N) where N is the number of commands to look up", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COMMAND-NAME", + 
"DisplayText": "command-name", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "COMMAND_INFO", + "Name": "COMMAND|INFO", + "Summary": "Returns information about one, multiple or all commands.", + "Group": "Server", + "Complexity": "O(N) where N is the number of commands to look up", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COMMAND-NAME", + "DisplayText": "command-name", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + } + ] + }, + { + "Command": "COMMITAOF", + "Name": "COMMITAOF", + "Summary": "Commit to append-only file.", + "Group": "Server" + }, + { + "Command": "CONFIG", + "Name": "CONFIG", + "Summary": "A container for server configuration commands.", + "Group": "Server", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "CONFIG_GET", + "Name": "CONFIG|GET", + "Summary": "Returns the effective values of configuration parameters.", + "Group": "Server", + "Complexity": "O(N) when N is the number of configuration parameters provided", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PARAMETER", + "DisplayText": "parameter", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "CONFIG_REWRITE", + "Name": "CONFIG|REWRITE", + "Summary": "Persists the effective configuration to file.", + "Group": "Server", + "Complexity": "O(1)" + }, + { + "Command": "CONFIG_SET", + "Name": "CONFIG|SET", + "Summary": "Sets configuration parameters in-flight.", + "Group": "Server", + "Complexity": "O(N) when N is the number of configuration parameters provided", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "DATA", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PARAMETER", + "DisplayText": "parameter", + "Type": "String" + }, + { + "TypeDiscriminator": 
"RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + } + ] + } + ] + }, + { + "Command": "COSCAN", + "Name": "COSCAN", + "Summary": "Iterates over members of a collection object.", + "Group": "Generic", + "Complexity": "O(1) for every call. O(N) for a complete iteration, including enough command calls for the cursor to return back to 0. N is the number of elements inside the collection.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CURSOR", + "DisplayText": "cursor", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "Token": "MATCH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "DBSIZE", + "Name": "DBSIZE", + "Summary": "Returns the number of keys in the database.", + "Group": "Server", + "Complexity": "O(1)" + }, + { + "Command": "DECR", + "Name": "DECR", + "Summary": "Decrements the integer value of a key by one. Uses 0 as initial value if the key doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "DECRBY", + "Name": "DECRBY", + "Summary": "Decrements a number from the integer value of a key. 
Uses 0 as initial value if the key doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "DECREMENT", + "DisplayText": "decrement", + "Type": "Integer" + } + ] + }, + { + "Command": "DEL", + "Name": "DEL", + "Summary": "Deletes one or more keys.", + "Group": "Generic", + "Complexity": "O(N) where N is the number of keys that will be removed. When a key to remove holds a value other than a string, the individual complexity for this key is O(M) where M is the number of elements in the list, set, sorted set or hash. Removing a single key that holds a string value is O(1).", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "DISCARD", + "Name": "DISCARD", + "Summary": "Discards a transaction.", + "Group": "Transactions", + "Complexity": "O(N), when N is the number of queued commands" + }, + { + "Command": "ECHO", + "Name": "ECHO", + "Summary": "Returns the given string.", + "Group": "Connection", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MESSAGE", + "DisplayText": "message", + "Type": "String" + } + ] + }, + { + "Command": "EVAL", + "Name": "EVAL", + "Summary": "Executes a server-side Lua script.", + "Group": "Scripting", + "Complexity": "Depends on the script that is executed.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SCRIPT", + "DisplayText": "script", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NUMKEYS", + "DisplayText": "numkeys", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + 
"DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Optional, Multiple", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ARG", + "DisplayText": "arg", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "EVALSHA", + "Name": "EVALSHA", + "Summary": "Executes a server-side Lua script by SHA1 digest.", + "Group": "Scripting", + "Complexity": "Depends on the script that is executed.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SHA1", + "DisplayText": "sha1", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NUMKEYS", + "DisplayText": "numkeys", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Optional, Multiple", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ARG", + "DisplayText": "arg", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "EXEC", + "Name": "EXEC", + "Summary": "Executes all commands in a transaction.", + "Group": "Transactions", + "Complexity": "Depends on commands in the transaction" + }, + { + "Command": "EXISTS", + "Name": "EXISTS", + "Summary": "Determines whether one or more keys exist.", + "Group": "Generic", + "Complexity": "O(N) where N is the number of keys to check.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "EXPIRE", + "Name": "EXPIRE", + "Summary": "Sets the expiration time of a key in seconds.", + "Group": "Generic", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": 
"RespCommandBasicArgument", + "Name": "SECONDS", + "DisplayText": "seconds", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CONDITION", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NX", + "DisplayText": "nx", + "Type": "PureToken", + "Token": "NX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "XX", + "DisplayText": "xx", + "Type": "PureToken", + "Token": "XX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "GT", + "DisplayText": "gt", + "Type": "PureToken", + "Token": "GT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LT", + "DisplayText": "lt", + "Type": "PureToken", + "Token": "LT" + } + ] + } + ] + }, + { + "Command": "FAILOVER", + "Name": "FAILOVER", + "Summary": "Starts a coordinated failover from a server to one of its replicas.", + "Group": "Server", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "TARGET", + "Type": "Block", + "Token": "TO", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HOST", + "DisplayText": "host", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PORT", + "DisplayText": "port", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FORCE", + "DisplayText": "force", + "Type": "PureToken", + "Token": "FORCE", + "ArgumentFlags": "Optional" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ABORT", + "DisplayText": "abort", + "Type": "PureToken", + "Token": "ABORT", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MILLISECONDS", + "DisplayText": "milliseconds", + "Type": "Integer", + "Token": "TIMEOUT", + "ArgumentFlags": "Optional" + } + ] + }, + { + 
"Command": "FLUSHALL", + "Name": "FLUSHALL", + "Summary": "Removes all keys from all databases.", + "Group": "Server", + "Complexity": "O(N) where N is the total number of keys in all databases", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "FLUSH-TYPE", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ASYNC", + "DisplayText": "async", + "Type": "PureToken", + "Token": "ASYNC" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SYNC", + "DisplayText": "sync", + "Type": "PureToken", + "Token": "SYNC" + } + ] + } + ] + }, + { + "Command": "FLUSHDB", + "Name": "FLUSHDB", + "Summary": "Remove all keys from the current database.", + "Group": "Server", + "Complexity": "O(N) where N is the number of keys in the selected database", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "FLUSH-TYPE", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ASYNC", + "DisplayText": "async", + "Type": "PureToken", + "Token": "ASYNC" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SYNC", + "DisplayText": "sync", + "Type": "PureToken", + "Token": "SYNC" + } + ] + } + ] + }, + { + "Command": "FORCEGC", + "Name": "FORCEGC", + "Summary": "Forces garbage collection.", + "Group": "Server" + }, + { + "Command": "GEOADD", + "Name": "GEOADD", + "Summary": "Adds one or more members to a geospatial index. 
The key is created if it doesn\u0027t exist.", + "Group": "Geo", + "Complexity": "O(log(N)) for each item added, where N is the number of elements in the sorted set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CONDITION", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NX", + "DisplayText": "nx", + "Type": "PureToken", + "Token": "NX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "XX", + "DisplayText": "xx", + "Type": "PureToken", + "Token": "XX" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CHANGE", + "DisplayText": "change", + "Type": "PureToken", + "Token": "CH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "DATA", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LONGITUDE", + "DisplayText": "longitude", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LATITUDE", + "DisplayText": "latitude", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String" + } + ] + } + ] + }, + { + "Command": "GEODIST", + "Name": "GEODIST", + "Summary": "Returns the distance between two members of a geospatial index.", + "Group": "Geo", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER1", + "DisplayText": "member1", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": 
"MEMBER2", + "DisplayText": "member2", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "UNIT", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "M", + "DisplayText": "m", + "Type": "PureToken", + "Token": "M" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "KM", + "DisplayText": "km", + "Type": "PureToken", + "Token": "KM" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FT", + "DisplayText": "ft", + "Type": "PureToken", + "Token": "FT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MI", + "DisplayText": "mi", + "Type": "PureToken", + "Token": "MI" + } + ] + } + ] + }, + { + "Command": "GEOHASH", + "Name": "GEOHASH", + "Summary": "Returns members from a geospatial index as geohash strings.", + "Group": "Geo", + "Complexity": "O(1) for each member requested.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "GEOPOS", + "Name": "GEOPOS", + "Summary": "Returns the longitude and latitude of members from a geospatial index.", + "Group": "Geo", + "Complexity": "O(1) for each member requested.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "GEOSEARCH", + "Name": "GEOSEARCH", + "Summary": "Queries a geospatial index for members inside an area of a box or a circle.", + "Group": "Geo", + 
"Complexity": "O(N\u002Blog(M)) where N is the number of elements in the grid-aligned bounding box area around the shape provided as the filter and M is the number of items inside the shape", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "FROM", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String", + "Token": "FROMMEMBER" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "FROMLONLAT", + "Type": "Block", + "Token": "FROMLONLAT", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LONGITUDE", + "DisplayText": "longitude", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LATITUDE", + "DisplayText": "latitude", + "Type": "Double" + } + ] + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "BY", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CIRCLE", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RADIUS", + "DisplayText": "radius", + "Type": "Double", + "Token": "BYRADIUS" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "UNIT", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "M", + "DisplayText": "m", + "Type": "PureToken", + "Token": "M" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "KM", + "DisplayText": "km", + "Type": "PureToken", + "Token": "KM" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FT", + "DisplayText": "ft", + "Type": "PureToken", + "Token": "FT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": 
"MI", + "DisplayText": "mi", + "Type": "PureToken", + "Token": "MI" + } + ] + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "BOX", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WIDTH", + "DisplayText": "width", + "Type": "Double", + "Token": "BYBOX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HEIGHT", + "DisplayText": "height", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "UNIT", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "M", + "DisplayText": "m", + "Type": "PureToken", + "Token": "M" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "KM", + "DisplayText": "km", + "Type": "PureToken", + "Token": "KM" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FT", + "DisplayText": "ft", + "Type": "PureToken", + "Token": "FT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MI", + "DisplayText": "mi", + "Type": "PureToken", + "Token": "MI" + } + ] + } + ] + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ORDER", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ASC", + "DisplayText": "asc", + "Type": "PureToken", + "Token": "ASC" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "DESC", + "DisplayText": "desc", + "Type": "PureToken", + "Token": "DESC" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "COUNT-BLOCK", + "Type": "Block", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ANY", + "DisplayText": "any", + "Type": 
"PureToken", + "Token": "ANY", + "ArgumentFlags": "Optional" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHCOORD", + "DisplayText": "withcoord", + "Type": "PureToken", + "Token": "WITHCOORD", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHDIST", + "DisplayText": "withdist", + "Type": "PureToken", + "Token": "WITHDIST", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHHASH", + "DisplayText": "withhash", + "Type": "PureToken", + "Token": "WITHHASH", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "GET", + "Name": "GET", + "Summary": "Returns the string value of a key.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "GETBIT", + "Name": "GETBIT", + "Summary": "Returns a bit value by offset.", + "Group": "Bitmap", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + } + ] + }, + { + "Command": "GETDEL", + "Name": "GETDEL", + "Summary": "Returns the string value of a key after deleting the key.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "GETRANGE", + "Name": "GETRANGE", + "Summary": "Returns a substring of the string stored at a key.", + "Group": "String", + "Complexity": "O(N) where N is the length of the returned string. 
The complexity is ultimately determined by the returned length, but because creating a substring from an existing string is very cheap, it can be considered O(1) for small strings.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "END", + "DisplayText": "end", + "Type": "Integer" + } + ] + }, + { + "Command": "HDEL", + "Name": "HDEL", + "Summary": "Deletes one or more fields and their values from a hash. Deletes the hash if no fields remain.", + "Group": "Hash", + "Complexity": "O(N) where N is the number of fields to be removed.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "HELLO", + "Name": "HELLO", + "Summary": "Handshakes with the Redis server.", + "Group": "Connection", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ARGUMENTS", + "Type": "Block", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PROTOVER", + "DisplayText": "protover", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "AUTH", + "Type": "Block", + "Token": "AUTH", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "USERNAME", + "DisplayText": "username", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PASSWORD", + "DisplayText": "password", + "Type": 
"String" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CLIENTNAME", + "DisplayText": "clientname", + "Type": "String", + "Token": "SETNAME", + "ArgumentFlags": "Optional" + } + ] + } + ] + }, + { + "Command": "HEXISTS", + "Name": "HEXISTS", + "Summary": "Determines whether a field exists in a hash.", + "Group": "Hash", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + } + ] + }, + { + "Command": "HGET", + "Name": "HGET", + "Summary": "Returns the value of a field in a hash.", + "Group": "Hash", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + } + ] + }, + { + "Command": "HGETALL", + "Name": "HGETALL", + "Summary": "Returns all fields and values in a hash.", + "Group": "Hash", + "Complexity": "O(N) where N is the size of the hash.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "HINCRBY", + "Name": "HINCRBY", + "Summary": "Increments the integer value of a field in a hash by a number. 
Uses 0 as initial value if the field doesn\u0027t exist.", + "Group": "Hash", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INCREMENT", + "DisplayText": "increment", + "Type": "Integer" + } + ] + }, + { + "Command": "HINCRBYFLOAT", + "Name": "HINCRBYFLOAT", + "Summary": "Increments the floating point value of a field by a number. Uses 0 as initial value if the field doesn\u0027t exist.", + "Group": "Hash", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INCREMENT", + "DisplayText": "increment", + "Type": "Double" + } + ] + }, + { + "Command": "HKEYS", + "Name": "HKEYS", + "Summary": "Returns all fields in a hash.", + "Group": "Hash", + "Complexity": "O(N) where N is the size of the hash.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "HLEN", + "Name": "HLEN", + "Summary": "Returns the number of fields in a hash.", + "Group": "Hash", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "HMGET", + "Name": "HMGET", + "Summary": "Returns the values of all fields in a hash.", + "Group": "Hash", + "Complexity": "O(N) where N is the number of fields being requested.", 
+ "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "HMSET", + "Name": "HMSET", + "Summary": "Sets the values of multiple fields.", + "Group": "Hash", + "Complexity": "O(N) where N is the number of fields being set.", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060HSET\u0060 with multiple field-value pairs", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "DATA", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + } + ] + }, + { + "Command": "HRANDFIELD", + "Name": "HRANDFIELD", + "Summary": "Returns one or more random fields from a hash.", + "Group": "Hash", + "Complexity": "O(N) where N is the number of fields returned", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "OPTIONS", + "Type": "Block", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHVALUES", + "DisplayText": "withvalues", + "Type": "PureToken", + "Token": "WITHVALUES", + "ArgumentFlags": "Optional" + } + ] + } + ] + }, + { + 
"Command": "HSCAN", + "Name": "HSCAN", + "Summary": "Iterates over fields and values of a hash.", + "Group": "Hash", + "Complexity": "O(1) for every call. O(N) for a complete iteration, including enough command calls for the cursor to return back to 0. N is the number of elements inside the collection.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CURSOR", + "DisplayText": "cursor", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "Token": "MATCH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "HSET", + "Name": "HSET", + "Summary": "Creates or modifies the value of a field in a hash.", + "Group": "Hash", + "Complexity": "O(1) for each field/value pair added, so O(N) to add N field/value pairs when the command is called with multiple field/value pairs.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "DATA", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + } + ] + }, + { + "Command": "HSETNX", + "Name": "HSETNX", + "Summary": "Sets the value of a field in a hash only when the field doesn\u0027t exist.", + "Group": "Hash", + "Complexity": "O(1)", + "Arguments": [ + { + 
"TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + }, + { + "Command": "HSTRLEN", + "Name": "HSTRLEN", + "Summary": "Returns the length of the value of a field.", + "Group": "Hash", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "FIELD", + "DisplayText": "field", + "Type": "String" + } + ] + }, + { + "Command": "HVALS", + "Name": "HVALS", + "Summary": "Returns all values in a hash.", + "Group": "Hash", + "Complexity": "O(N) where N is the size of the hash.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "INCR", + "Name": "INCR", + "Summary": "Increments the integer value of a key by one. Uses 0 as initial value if the key doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "INCRBY", + "Name": "INCRBY", + "Summary": "Increments the integer value of a key by a number. 
Uses 0 as initial value if the key doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INCREMENT", + "DisplayText": "increment", + "Type": "Integer" + } + ] + }, + { + "Command": "INFO", + "Name": "INFO", + "Summary": "Returns information and statistics about the server.", + "Group": "Server", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SECTION", + "DisplayText": "section", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "KEYS", + "Name": "KEYS", + "Summary": "Returns all key names that match a pattern.", + "Group": "Generic", + "Complexity": "O(N) with N being the number of keys in the database, under the assumption that the key names in the database and the given pattern have limited length.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern" + } + ] + }, + { + "Command": "LASTSAVE", + "Name": "LASTSAVE", + "Summary": "Returns the Unix timestamp of the last successful save to disk.", + "Group": "Server", + "Complexity": "O(1)" + }, + { + "Command": "LATENCY", + "Name": "LATENCY", + "Summary": "A container for latency diagnostics commands.", + "Group": "Server", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "LATENCY_HELP", + "Name": "LATENCY|HELP", + "Summary": "Returns helpful text about the different subcommands.", + "Group": "Server", + "Complexity": "O(1)" + }, + { + "Command": "LATENCY_HISTOGRAM", + "Name": "LATENCY|HISTOGRAM", + "Summary": "Returns the cumulative distribution of latencies of a subset or all commands.", + "Group": "Server", + "Complexity": "O(N) where N is the number of commands with 
latency information being retrieved.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COMMAND", + "DisplayText": "command", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "LATENCY_RESET", + "Name": "LATENCY|RESET", + "Summary": "Resets the latency data for one or more events.", + "Group": "Server", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "EVENT", + "DisplayText": "event", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + } + ] + }, + { + "Command": "LINDEX", + "Name": "LINDEX", + "Summary": "Returns an element from a list by its index.", + "Group": "List", + "Complexity": "O(N) where N is the number of elements to traverse to get to the element at index. This makes asking for the first or the last element of the list O(1).", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INDEX", + "DisplayText": "index", + "Type": "Integer" + } + ] + }, + { + "Command": "LINSERT", + "Name": "LINSERT", + "Summary": "Inserts an element before or after another element in a list.", + "Group": "List", + "Complexity": "O(N) where N is the number of elements to traverse before seeing the value pivot. 
This means that inserting somewhere on the left end on the list (head) can be considered O(1) and inserting somewhere on the right end (tail) is O(N).", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WHERE", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BEFORE", + "DisplayText": "before", + "Type": "PureToken", + "Token": "BEFORE" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "AFTER", + "DisplayText": "after", + "Type": "PureToken", + "Token": "AFTER" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PIVOT", + "DisplayText": "pivot", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String" + } + ] + }, + { + "Command": "LLEN", + "Name": "LLEN", + "Summary": "Returns the length of a list.", + "Group": "List", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "LMOVE", + "Name": "LMOVE", + "Summary": "Returns an element after popping it from one list and pushing it to another. 
Deletes the list if the last element was moved.", + "Group": "List", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "SOURCE", + "DisplayText": "source", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTINATION", + "DisplayText": "destination", + "Type": "Key", + "KeySpecIndex": 1 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WHEREFROM", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LEFT", + "DisplayText": "left", + "Type": "PureToken", + "Token": "LEFT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RIGHT", + "DisplayText": "right", + "Type": "PureToken", + "Token": "RIGHT" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WHERETO", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LEFT", + "DisplayText": "left", + "Type": "PureToken", + "Token": "LEFT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RIGHT", + "DisplayText": "right", + "Type": "PureToken", + "Token": "RIGHT" + } + ] + } + ] + }, + { + "Command": "LMPOP", + "Name": "LMPOP", + "Summary": "Returns multiple elements from a list after removing them. 
Deletes the list if the last element was popped.", + "Group": "List", + "Complexity": "O(N\u002BM) where N is the number of provided keys and M is the number of elements returned.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NUMKEYS", + "DisplayText": "numkeys", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "WHERE", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LEFT", + "DisplayText": "left", + "Type": "PureToken", + "Token": "LEFT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RIGHT", + "DisplayText": "right", + "Type": "PureToken", + "Token": "RIGHT" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "LPOP", + "Name": "LPOP", + "Summary": "Returns the first elements in a list after removing it. Deletes the list if the last element was popped.", + "Group": "List", + "Complexity": "O(N) where N is the number of elements returned", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "LPUSH", + "Name": "LPUSH", + "Summary": "Prepends one or more elements to a list. 
Creates the key if it doesn\u0027t exist.", + "Group": "List", + "Complexity": "O(1) for each element added, so O(N) to add N elements when the command is called with multiple arguments.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "LPUSHX", + "Name": "LPUSHX", + "Summary": "Prepends one or more elements to a list only when the list exists.", + "Group": "List", + "Complexity": "O(1) for each element added, so O(N) to add N elements when the command is called with multiple arguments.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "LRANGE", + "Name": "LRANGE", + "Summary": "Returns a range of elements from a list.", + "Group": "List", + "Complexity": "O(S\u002BN) where S is the distance of start offset from HEAD for small lists, from nearest end (HEAD or TAIL) for large lists; and N is the number of elements in the specified range.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "STOP", + "DisplayText": "stop", + "Type": "Integer" + } + ] + }, + { + "Command": "LREM", + "Name": "LREM", + "Summary": "Removes elements from a list. 
Deletes the list if the last element was removed.", + "Group": "List", + "Complexity": "O(N\u002BM) where N is the length of the list and M is the number of elements removed.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String" + } + ] + }, + { + "Command": "LSET", + "Name": "LSET", + "Summary": "Sets the value of an element in a list by its index.", + "Group": "List", + "Complexity": "O(N) where N is the length of the list. Setting either the first or the last element of the list is O(1).", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INDEX", + "DisplayText": "index", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String" + } + ] + }, + { + "Command": "LTRIM", + "Name": "LTRIM", + "Summary": "Removes elements from both ends a list. 
Deletes the list if all elements were trimmed.", + "Group": "List", + "Complexity": "O(N) where N is the number of elements to be removed by the operation.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "STOP", + "DisplayText": "stop", + "Type": "Integer" + } + ] + }, + { + "Command": "MEMORY", + "Name": "MEMORY", + "Summary": "A container for memory diagnostics commands.", + "Group": "Server", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "MEMORY_USAGE", + "Name": "MEMORY|USAGE", + "Summary": "Estimates the memory usage of a key.", + "Group": "Server", + "Complexity": "O(N) where N is the number of samples.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "SAMPLES", + "ArgumentFlags": "Optional" + } + ] + } + ] + }, + { + "Command": "MGET", + "Name": "MGET", + "Summary": "Atomically returns the string values of one or more keys.", + "Group": "String", + "Complexity": "O(N) where N is the number of keys to retrieve.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "MIGRATE", + "Name": "MIGRATE", + "Summary": "Atomically transfers a key from one Redis instance to another.", + "Group": "Generic", + "Complexity": "This command actually executes a DUMP\u002BDEL in the source instance, and a RESTORE in the target instance. 
See the pages of these commands for time complexity. Also an O(N) data transfer between the two instances is performed.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HOST", + "DisplayText": "host", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PORT", + "DisplayText": "port", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "KEY-SELECTOR", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "EMPTY-STRING", + "DisplayText": "empty-string", + "Type": "PureToken", + "Token": "" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "DESTINATION-DB", + "DisplayText": "destination-db", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TIMEOUT", + "DisplayText": "timeout", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COPY", + "DisplayText": "copy", + "Type": "PureToken", + "Token": "COPY", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "REPLACE", + "DisplayText": "replace", + "Type": "PureToken", + "Token": "REPLACE", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "AUTHENTICATION", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "AUTH", + "DisplayText": "password", + "Type": "String", + "Token": "AUTH" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "AUTH2", + "Type": "Block", + "Token": "AUTH2", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "USERNAME", + "DisplayText": "username", + "Type": "String" + }, 
+ { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PASSWORD", + "DisplayText": "password", + "Type": "String" + } + ] + } + ] + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEYS", + "DisplayText": "key", + "Type": "Key", + "Token": "KEYS", + "ArgumentFlags": "Optional, Multiple", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "MODULE", + "Name": "MODULE", + "Summary": "A container for module commands.", + "Group": "Server", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "MODULE_LOADCS", + "Name": "MODULE|LOADCS", + "Summary": "Load a C# module in Garnet.", + "Group": "Server", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SRCPATH", + "DisplayText": "srcPath", + "Type": "String", + "Summary": "Source path to module definition" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MODULEARG", + "DisplayText": "arg", + "Type": "String", + "Summary": "Module argument", + "ArgumentFlags": "Multiple" + } + ] + } + ] + }, + { + "Command": "MONITOR", + "Name": "MONITOR", + "Summary": "Listens for all requests received by the server in real-time.", + "Group": "Server" + }, + { + "Command": "MSET", + "Name": "MSET", + "Summary": "Atomically creates or modifies the string values of one or more keys.", + "Group": "String", + "Complexity": "O(N) where N is the number of keys to set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "DATA", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + } + ] + }, + { + "Command": "MSETNX", + "Name": "MSETNX", + "Summary": "Atomically modifies the string values of one or more keys only when all keys 
don\u0027t exist.", + "Group": "String", + "Complexity": "O(N) where N is the number of keys to set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "DATA", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + } + ] + }, + { + "Command": "MULTI", + "Name": "MULTI", + "Summary": "Starts a transaction.", + "Group": "Transactions", + "Complexity": "O(1)" + }, + { + "Command": "PERSIST", + "Name": "PERSIST", + "Summary": "Removes the expiration time of a key.", + "Group": "Generic", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "PEXPIRE", + "Name": "PEXPIRE", + "Summary": "Sets the expiration time of a key in milliseconds.", + "Group": "Generic", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MILLISECONDS", + "DisplayText": "milliseconds", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CONDITION", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NX", + "DisplayText": "nx", + "Type": "PureToken", + "Token": "NX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "XX", + "DisplayText": "xx", + "Type": "PureToken", + "Token": "XX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "GT", + "DisplayText": "gt", + "Type": "PureToken", + "Token": "GT" 
+ }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LT", + "DisplayText": "lt", + "Type": "PureToken", + "Token": "LT" + } + ] + } + ] + }, + { + "Command": "PFADD", + "Name": "PFADD", + "Summary": "Adds elements to a HyperLogLog key. Creates the key if it doesn\u0027t exist.", + "Group": "HyperLogLog", + "Complexity": "O(1) to add every element.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "PFCOUNT", + "Name": "PFCOUNT", + "Summary": "Returns the approximated cardinality of the set(s) observed by the HyperLogLog key(s).", + "Group": "HyperLogLog", + "Complexity": "O(1) with a very small average constant time when called with a single key. O(N) with N being the number of keys, and much bigger constant times, when called with multiple keys.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "PFMERGE", + "Name": "PFMERGE", + "Summary": "Merges one or more HyperLogLog values into a single key.", + "Group": "HyperLogLog", + "Complexity": "O(N) to merge N HyperLogLogs, but with high constant times.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTKEY", + "DisplayText": "destkey", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "SOURCEKEY", + "DisplayText": "sourcekey", + "Type": "Key", + "ArgumentFlags": "Optional, Multiple", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "PING", + "Name": "PING", + "Summary": "Returns the server\u0027s liveliness response.", + "Group": "Connection", + "Complexity": 
"O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MESSAGE", + "DisplayText": "message", + "Type": "String", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "PSETEX", + "Name": "PSETEX", + "Summary": "Sets both string value and expiration time in milliseconds of a key. The key is created if it doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1)", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060SET\u0060 with the \u0060PX\u0060 argument", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MILLISECONDS", + "DisplayText": "milliseconds", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + }, + { + "Command": "PSUBSCRIBE", + "Name": "PSUBSCRIBE", + "Summary": "Listens for messages published to channels that match one or more patterns.", + "Group": "PubSub", + "Complexity": "O(N) where N is the number of patterns to subscribe to.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "PTTL", + "Name": "PTTL", + "Summary": "Returns the expiration time in milliseconds of a key.", + "Group": "Generic", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "PUBLISH", + "Name": "PUBLISH", + "Summary": "Posts a message to a channel.", + "Group": "PubSub", + "Complexity": "O(N\u002BM) where N is the number of clients subscribed to the receiving channel and M is the total number of subscribed patterns (by any client).", + "Arguments": [ + { + 
"TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CHANNEL", + "DisplayText": "channel", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MESSAGE", + "DisplayText": "message", + "Type": "String" + } + ] + }, + { + "Command": "PUNSUBSCRIBE", + "Name": "PUNSUBSCRIBE", + "Summary": "Stops listening to messages published to channels that match one or more patterns.", + "Group": "PubSub", + "Complexity": "O(N) where N is the number of patterns to unsubscribe.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "QUIT", + "Name": "QUIT", + "Summary": "Closes the connection.", + "Group": "Connection", + "Complexity": "O(1)", + "DocFlags": "Deprecated", + "ReplacedBy": "just closing the connection" + }, + { + "Command": "READONLY", + "Name": "READONLY", + "Summary": "Enables read-only queries for a connection to a Redis Cluster replica node.", + "Group": "Cluster", + "Complexity": "O(1)" + }, + { + "Command": "READWRITE", + "Name": "READWRITE", + "Summary": "Enables read-write queries for a connection to a Reids Cluster replica node.", + "Group": "Cluster", + "Complexity": "O(1)" + }, + { + "Command": "REGISTERCS", + "Name": "REGISTERCS", + "Summary": "Registers custom C# commands in Garnet.", + "Group": "Server", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CMD", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CMDTYPE", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "READ", + "DisplayText": "read", + "Type": "PureToken", + "Token": "READ" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RMW", + "DisplayText": "rmw", + "Type": "PureToken", + "Token": "RMW" + }, + 
{ + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TXN", + "DisplayText": "txn", + "Type": "PureToken", + "Token": "TXN" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NAME", + "DisplayText": "cmdName", + "Type": "String", + "Summary": "Name of the command to register" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NUMPARAMS", + "DisplayText": "numParams", + "Type": "Integer", + "Summary": "Numer of parameters of the command to register" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CLASSNAME", + "DisplayText": "className", + "Type": "String", + "Summary": "Name of class defining the command to register" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "EXPTICKS", + "DisplayText": "expTicks", + "Type": "Integer", + "Summary": "Expiry of the command to register (in ticks)", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OBJCMDNAME", + "DisplayText": "objCmdName", + "Type": "String", + "Summary": "The object command name, if applicable", + "ArgumentFlags": "Optional" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INFO", + "DisplayText": "infoPath", + "Type": "String", + "Token": "INFO", + "Summary": "Path to JSON-serialized command info for registered commands", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "DOCS", + "DisplayText": "docsPath", + "Type": "String", + "Token": "DOCS", + "Summary": "Path to JSON-serialized command docs for registered commands", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SRC", + "DisplayText": "srcPath", + "Type": "String", + "Token": "SRC", + "Summary": "Source paths containing classes defining command to register", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "RENAME", + "Name": "RENAME", + "Summary": "Renames a 
key and overwrites the destination.", + "Group": "Generic", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "NEWKEY", + "DisplayText": "newkey", + "Type": "Key", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "REPLICAOF", + "Name": "REPLICAOF", + "Summary": "Configures a server as replica of another, or promotes it to a master.", + "Group": "Server", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ARGS", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "HOST-PORT", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HOST", + "DisplayText": "host", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PORT", + "DisplayText": "port", + "Type": "Integer" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "NO-ONE", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NO", + "DisplayText": "no", + "Type": "PureToken", + "Token": "NO" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ONE", + "DisplayText": "one", + "Type": "PureToken", + "Token": "ONE" + } + ] + } + ] + } + ] + }, + { + "Command": "RPOP", + "Name": "RPOP", + "Summary": "Returns and removes the last elements of a list. 
Deletes the list if the last element was popped.", + "Group": "List", + "Complexity": "O(N) where N is the number of elements returned", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "RPOPLPUSH", + "Name": "RPOPLPUSH", + "Summary": "Returns the last element of a list after removing and pushing it to another list. Deletes the list if the last element was popped.", + "Group": "List", + "Complexity": "O(1)", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060LMOVE\u0060 with the \u0060RIGHT\u0060 and \u0060LEFT\u0060 arguments", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "SOURCE", + "DisplayText": "source", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTINATION", + "DisplayText": "destination", + "Type": "Key", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "RPUSH", + "Name": "RPUSH", + "Summary": "Appends one or more elements to a list. 
Creates the key if it doesn\u0027t exist.", + "Group": "List", + "Complexity": "O(1) for each element added, so O(N) to add N elements when the command is called with multiple arguments.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "RPUSHX", + "Name": "RPUSHX", + "Summary": "Appends an element to a list only when the list exists.", + "Group": "List", + "Complexity": "O(1) for each element added, so O(N) to add N elements when the command is called with multiple arguments.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ELEMENT", + "DisplayText": "element", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "RUNTXP", + "Name": "RUNTXP", + "Summary": "Run registered transaction in Garnet.", + "Group": "Transactions", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TXNID", + "DisplayText": "txnId", + "Type": "Integer", + "Summary": "Registered transaction ID" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TXNPARAM", + "DisplayText": "param", + "Type": "String", + "Summary": "Transaction parameter", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "SADD", + "Name": "SADD", + "Summary": "Adds one or more members to a set. 
Creates the key if it doesn\u0027t exist.", + "Group": "Set", + "Complexity": "O(1) for each element added, so O(N) to add N elements when the command is called with multiple arguments.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "SAVE", + "Name": "SAVE", + "Summary": "Synchronously saves the database(s) to disk.", + "Group": "Server", + "Complexity": "O(N) where N is the total number of keys in all databases" + }, + { + "Command": "SCAN", + "Name": "SCAN", + "Summary": "Iterates over the key names in the database.", + "Group": "Generic", + "Complexity": "O(1) for every call. O(N) for a complete iteration, including enough command calls for the cursor to return back to 0. N is the number of elements inside the collection.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CURSOR", + "DisplayText": "cursor", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "Token": "MATCH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TYPE", + "DisplayText": "type", + "Type": "String", + "Token": "TYPE", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "SCARD", + "Name": "SCARD", + "Summary": "Returns the number of members in a set.", + "Group": "Set", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + 
} + ] + }, + { + "Command": "SCRIPT", + "Name": "SCRIPT", + "Summary": "A container for Lua scripts management commands.", + "Group": "Scripting", + "Complexity": "Depends on subcommand." + }, + { + "Command": "SDIFF", + "Name": "SDIFF", + "Summary": "Returns the difference of multiple sets.", + "Group": "Set", + "Complexity": "O(N) where N is the total number of elements in all given sets.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "SDIFFSTORE", + "Name": "SDIFFSTORE", + "Summary": "Stores the difference of multiple sets in a key.", + "Group": "Set", + "Complexity": "O(N) where N is the total number of elements in all given sets.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTINATION", + "DisplayText": "destination", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "SECONDARYOF", + "Name": "SECONDARYOF", + "Summary": "Configures a server as secondary of another, or promotes it to a primary.", + "Group": "Server", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ARGS", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "HOST-PORT", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HOST", + "DisplayText": "host", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PORT", + "DisplayText": "port", + "Type": "Integer" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "NO-ONE", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": 
"RespCommandBasicArgument", + "Name": "NO", + "DisplayText": "no", + "Type": "PureToken", + "Token": "NO" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ONE", + "DisplayText": "one", + "Type": "PureToken", + "Token": "ONE" + } + ] + } + ] + } + ] + }, + { + "Command": "SELECT", + "Name": "SELECT", + "Summary": "Changes the selected database.", + "Group": "Connection", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INDEX", + "DisplayText": "index", + "Type": "Integer" + } + ] + }, + { + "Command": "SET", + "Name": "SET", + "Summary": "Sets the string value of a key, ignoring its type. The key is created if it doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CONDITION", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NX", + "DisplayText": "nx", + "Type": "PureToken", + "Token": "NX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "XX", + "DisplayText": "xx", + "Type": "PureToken", + "Token": "XX" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "GET", + "DisplayText": "get", + "Type": "PureToken", + "Token": "GET", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "EXPIRATION", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SECONDS", + "DisplayText": "seconds", + "Type": "Integer", + "Token": "EX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", 
+ "Name": "MILLISECONDS", + "DisplayText": "milliseconds", + "Type": "Integer", + "Token": "PX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "UNIX-TIME-SECONDS", + "DisplayText": "unix-time-seconds", + "Type": "UnixTime", + "Token": "EXAT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "UNIX-TIME-MILLISECONDS", + "DisplayText": "unix-time-milliseconds", + "Type": "UnixTime", + "Token": "PXAT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "KEEPTTL", + "DisplayText": "keepttl", + "Type": "PureToken", + "Token": "KEEPTTL" + } + ] + } + ] + }, + { + "Command": "SETBIT", + "Name": "SETBIT", + "Summary": "Sets or clears the bit at offset of the string value. Creates the key if it doesn\u0027t exist.", + "Group": "Bitmap", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "Integer" + } + ] + }, + { + "Command": "SETEX", + "Name": "SETEX", + "Summary": "Sets the string value and expiration time of a key. 
Creates the key if it doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1)", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060SET\u0060 with the \u0060EX\u0060 argument", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SECONDS", + "DisplayText": "seconds", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + }, + { + "Command": "SETRANGE", + "Name": "SETRANGE", + "Summary": "Overwrites a part of a string value with another by an offset. Creates the key if it doesn\u0027t exist.", + "Group": "String", + "Complexity": "O(1), not counting the time taken to copy the new string in place. Usually, this string is very small so the amortized complexity is O(1). Otherwise, complexity is O(M) with M being the length of the value argument.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + }, + { + "Command": "SINTER", + "Name": "SINTER", + "Summary": "Returns the intersect of multiple sets.", + "Group": "Set", + "Complexity": "O(N*M) worst case where N is the cardinality of the smallest set and M is the number of sets.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "SINTERSTORE", + "Name": "SINTERSTORE", + "Summary": "Stores the intersect of multiple sets in a key.", + "Group": 
"Set", + "Complexity": "O(N*M) worst case where N is the cardinality of the smallest set and M is the number of sets.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTINATION", + "DisplayText": "destination", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "SISMEMBER", + "Name": "SISMEMBER", + "Summary": "Determines whether a member belongs to a set.", + "Group": "Set", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String" + } + ] + }, + { + "Command": "SECONDARYOF", + "Name": "SLAVEOF", + "Summary": "Sets a Redis server as a replica of another, or promotes it to being a master.", + "Group": "Server", + "Complexity": "O(1)", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060REPLICAOF\u0060", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ARGS", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "HOST-PORT", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HOST", + "DisplayText": "host", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PORT", + "DisplayText": "port", + "Type": "Integer" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "NO-ONE", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NO", + "DisplayText": "no", + "Type": "PureToken", + "Token": "NO" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ONE", + 
"DisplayText": "one", + "Type": "PureToken", + "Token": "ONE" + } + ] + } + ] + } + ] + }, + { + "Command": "SMEMBERS", + "Name": "SMEMBERS", + "Summary": "Returns all members of a set.", + "Group": "Set", + "Complexity": "O(N) where N is the set cardinality.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "SMOVE", + "Name": "SMOVE", + "Summary": "Moves a member from one set to another.", + "Group": "Set", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "SOURCE", + "DisplayText": "source", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTINATION", + "DisplayText": "destination", + "Type": "Key", + "KeySpecIndex": 1 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String" + } + ] + }, + { + "Command": "SPOP", + "Name": "SPOP", + "Summary": "Returns one or more random members from a set after removing them. 
Deletes the set if the last member was popped.", + "Group": "Set", + "Complexity": "Without the count argument O(1), otherwise O(N) where N is the value of the passed count.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "SRANDMEMBER", + "Name": "SRANDMEMBER", + "Summary": "Get one or multiple random members from a set", + "Group": "Set", + "Complexity": "Without the count argument O(1), otherwise O(N) where N is the absolute value of the passed count.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "SREM", + "Name": "SREM", + "Summary": "Removes one or more members from a set. Deletes the set if the last member was removed.", + "Group": "Set", + "Complexity": "O(N) where N is the number of members to be removed.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "SSCAN", + "Name": "SSCAN", + "Summary": "Iterates over members of a set.", + "Group": "Set", + "Complexity": "O(1) for every call. O(N) for a complete iteration, including enough command calls for the cursor to return back to 0. 
N is the number of elements inside the collection.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CURSOR", + "DisplayText": "cursor", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "Token": "MATCH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "STRLEN", + "Name": "STRLEN", + "Summary": "Returns the length of a string value.", + "Group": "String", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "SUBSCRIBE", + "Name": "SUBSCRIBE", + "Summary": "Listens for messages published to channels.", + "Group": "PubSub", + "Complexity": "O(N) where N is the number of channels to subscribe to.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CHANNEL", + "DisplayText": "channel", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "SUNION", + "Name": "SUNION", + "Summary": "Returns the union of multiple sets.", + "Group": "Set", + "Complexity": "O(N) where N is the total number of elements in all given sets.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "SUNIONSTORE", + "Name": "SUNIONSTORE", + "Summary": "Stores the union of multiple sets in a key.", + "Group": "Set", + "Complexity": "O(N) where N is the total number of elements in all given 
sets.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "DESTINATION", + "DisplayText": "destination", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 1 + } + ] + }, + { + "Command": "TIME", + "Name": "TIME", + "Summary": "Returns the server time.", + "Group": "Server", + "Complexity": "O(1)" + }, + { + "Command": "TTL", + "Name": "TTL", + "Summary": "Returns the expiration time in seconds of a key.", + "Group": "Generic", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "TYPE", + "Name": "TYPE", + "Summary": "Determines the type of value stored at a key.", + "Group": "Generic", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "UNLINK", + "Name": "UNLINK", + "Summary": "Asynchronously deletes one or more keys.", + "Group": "Generic", + "Complexity": "O(1) for each key removed regardless of its size. 
Then the command does O(N) work in a different thread in order to reclaim memory, where N is the number of allocations the deleted objects where composed of.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "UNSUBSCRIBE", + "Name": "UNSUBSCRIBE", + "Summary": "Stops listening to messages posted to channels.", + "Group": "PubSub", + "Complexity": "O(N) where N is the number of channels to unsubscribe.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CHANNEL", + "DisplayText": "channel", + "Type": "String", + "ArgumentFlags": "Optional, Multiple" + } + ] + }, + { + "Command": "UNWATCH", + "Name": "UNWATCH", + "Summary": "Forgets about watched keys of a transaction.", + "Group": "Transactions", + "Complexity": "O(1)" + }, + { + "Command": "WATCH", + "Name": "WATCH", + "Summary": "Monitors changes to keys to determine the execution of a transaction.", + "Group": "Transactions", + "Complexity": "O(1) for every key.", + "SubCommands": [ + { + "Command": "WATCH_MS", + "Name": "WATCH|MS", + "Summary": "Monitors changes to keys in main store to determine the execution of a transaction.", + "Group": "Transactions", + "Complexity": "O(1) for every key.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "WATCH_OS", + "Name": "WATCH|OS", + "Summary": "Monitors changes to keys in object store to determine the execution of a transaction.", + "Group": "Transactions", + "Complexity": "O(1) for every key.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + } + ], + "Arguments": [ + { + 
"TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "ZADD", + "Name": "ZADD", + "Summary": "Adds one or more members to a sorted set, or updates their scores. Creates the key if it doesn\u0027t exist.", + "Group": "SortedSet", + "Complexity": "O(log(N)) for each item added, where N is the number of elements in the sorted set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CONDITION", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NX", + "DisplayText": "nx", + "Type": "PureToken", + "Token": "NX" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "XX", + "DisplayText": "xx", + "Type": "PureToken", + "Token": "XX" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "COMPARISON", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "GT", + "DisplayText": "gt", + "Type": "PureToken", + "Token": "GT" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "LT", + "DisplayText": "lt", + "Type": "PureToken", + "Token": "LT" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CHANGE", + "DisplayText": "change", + "Type": "PureToken", + "Token": "CH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INCREMENT", + "DisplayText": "increment", + "Type": "PureToken", + "Token": "INCR", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "DATA", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + 
"TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SCORE", + "DisplayText": "score", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String" + } + ] + } + ] + }, + { + "Command": "ZCARD", + "Name": "ZCARD", + "Summary": "Returns the number of members in a sorted set.", + "Group": "SortedSet", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "ZCOUNT", + "Name": "ZCOUNT", + "Summary": "Returns the count of members in a sorted set that have scores within a range.", + "Group": "SortedSet", + "Complexity": "O(log(N)) with N being the number of elements in the sorted set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MIN", + "DisplayText": "min", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MAX", + "DisplayText": "max", + "Type": "Double" + } + ] + }, + { + "Command": "ZDIFF", + "Name": "ZDIFF", + "Summary": "Returns the difference between multiple sorted sets.", + "Group": "SortedSet", + "Complexity": "O(L \u002B (N-K)log(N)) worst case where L is the total number of elements in all the sets, N is the size of the first set, and K is the size of the result set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NUMKEYS", + "DisplayText": "numkeys", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORES", + "DisplayText": "withscores", + "Type": "PureToken", + 
"Token": "WITHSCORES", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ZINCRBY", + "Name": "ZINCRBY", + "Summary": "Increments the score of a member in a sorted set.", + "Group": "SortedSet", + "Complexity": "O(log(N)) where N is the number of elements in the sorted set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INCREMENT", + "DisplayText": "increment", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String" + } + ] + }, + { + "Command": "ZLEXCOUNT", + "Name": "ZLEXCOUNT", + "Summary": "Returns the number of members in a sorted set within a lexicographical range.", + "Group": "SortedSet", + "Complexity": "O(log(N)) with N being the number of elements in the sorted set.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MIN", + "DisplayText": "min", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MAX", + "DisplayText": "max", + "Type": "String" + } + ] + }, + { + "Command": "ZMSCORE", + "Name": "ZMSCORE", + "Summary": "Returns the score of one or more members in a sorted set.", + "Group": "SortedSet", + "Complexity": "O(N) where N is the number of members being requested.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "ZPOPMAX", + "Name": "ZPOPMAX", + "Summary": "Returns the highest-scoring 
members from a sorted set after removing them. Deletes the sorted set if the last member was popped.", + "Group": "SortedSet", + "Complexity": "O(log(N)*M) with N being the number of elements in the sorted set, and M being the number of elements popped.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ZPOPMIN", + "Name": "ZPOPMIN", + "Summary": "Returns the lowest-scoring members from a sorted set after removing them. Deletes the sorted set if the last member was popped.", + "Group": "SortedSet", + "Complexity": "O(log(N)*M) with N being the number of elements in the sorted set, and M being the number of elements popped.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ZRANDMEMBER", + "Name": "ZRANDMEMBER", + "Summary": "Returns one or more random members from a sorted set.", + "Group": "SortedSet", + "Complexity": "O(N) where N is the number of members returned", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "OPTIONS", + "Type": "Block", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORES", + "DisplayText": "withscores", + "Type": 
"PureToken", + "Token": "WITHSCORES", + "ArgumentFlags": "Optional" + } + ] + } + ] + }, + { + "Command": "ZRANGE", + "Name": "ZRANGE", + "Summary": "Returns members in a sorted set within a range of indexes.", + "Group": "SortedSet", + "Complexity": "O(log(N)\u002BM) with N being the number of elements in the sorted set and M the number of elements returned.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "STOP", + "DisplayText": "stop", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "SORTBY", + "Type": "OneOf", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BYSCORE", + "DisplayText": "byscore", + "Type": "PureToken", + "Token": "BYSCORE" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BYLEX", + "DisplayText": "bylex", + "Type": "PureToken", + "Token": "BYLEX" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "REV", + "DisplayText": "rev", + "Type": "PureToken", + "Token": "REV", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "LIMIT", + "Type": "Block", + "Token": "LIMIT", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORES", + "DisplayText": "withscores", + "Type": "PureToken", + "Token": "WITHSCORES", + "ArgumentFlags": 
"Optional" + } + ] + }, + { + "Command": "ZRANGEBYSCORE", + "Name": "ZRANGEBYSCORE", + "Summary": "Returns members in a sorted set within a range of scores.", + "Group": "SortedSet", + "Complexity": "O(log(N)\u002BM) with N being the number of elements in the sorted set and M the number of elements being returned. If M is constant (e.g. always asking for the first 10 elements with LIMIT), you can consider it O(log(N)).", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060ZRANGE\u0060 with the \u0060BYSCORE\u0060 argument", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MIN", + "DisplayText": "min", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MAX", + "DisplayText": "max", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORES", + "DisplayText": "withscores", + "Type": "PureToken", + "Token": "WITHSCORES", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "LIMIT", + "Type": "Block", + "Token": "LIMIT", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer" + } + ] + } + ] + }, + { + "Command": "ZRANK", + "Name": "ZRANK", + "Summary": "Returns the index of a member in a sorted set ordered by ascending scores.", + "Group": "SortedSet", + "Complexity": "O(log(N))", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + 
"Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORE", + "DisplayText": "withscore", + "Type": "PureToken", + "Token": "WITHSCORE", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ZREM", + "Name": "ZREM", + "Summary": "Removes one or more members from a sorted set. Deletes the sorted set if all members were removed.", + "Group": "SortedSet", + "Complexity": "O(M*log(N)) with N being the number of elements in the sorted set and M the number of elements to be removed.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "ZREMRANGEBYLEX", + "Name": "ZREMRANGEBYLEX", + "Summary": "Removes members in a sorted set within a lexicographical range. Deletes the sorted set if all members were removed.", + "Group": "SortedSet", + "Complexity": "O(log(N)\u002BM) with N being the number of elements in the sorted set and M the number of elements removed by the operation.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MIN", + "DisplayText": "min", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MAX", + "DisplayText": "max", + "Type": "String" + } + ] + }, + { + "Command": "ZREMRANGEBYRANK", + "Name": "ZREMRANGEBYRANK", + "Summary": "Removes members in a sorted set within a range of indexes. 
Deletes the sorted set if all members were removed.", + "Group": "SortedSet", + "Complexity": "O(log(N)\u002BM) with N being the number of elements in the sorted set and M the number of elements removed by the operation.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "STOP", + "DisplayText": "stop", + "Type": "Integer" + } + ] + }, + { + "Command": "ZREMRANGEBYSCORE", + "Name": "ZREMRANGEBYSCORE", + "Summary": "Removes members in a sorted set within a range of scores. Deletes the sorted set if all members were removed.", + "Group": "SortedSet", + "Complexity": "O(log(N)\u002BM) with N being the number of elements in the sorted set and M the number of elements removed by the operation.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MIN", + "DisplayText": "min", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MAX", + "DisplayText": "max", + "Type": "Double" + } + ] + }, + { + "Command": "ZREVRANGE", + "Name": "ZREVRANGE", + "Summary": "Returns members in a sorted set within a range of indexes in reverse order.", + "Group": "SortedSet", + "Complexity": "O(log(N)\u002BM) with N being the number of elements in the sorted set and M the number of elements returned.", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060ZRANGE\u0060 with the \u0060REV\u0060 argument", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": 
"RespCommandBasicArgument", + "Name": "START", + "DisplayText": "start", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "STOP", + "DisplayText": "stop", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORES", + "DisplayText": "withscores", + "Type": "PureToken", + "Token": "WITHSCORES", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ZREVRANGEBYSCORE", + "Name": "ZREVRANGEBYSCORE", + "Summary": "Returns members in a sorted set within a range of scores in reverse order.", + "Group": "SortedSet", + "Complexity": "O(log(N)\u002BM) with N being the number of elements in the sorted set and M the number of elements being returned. If M is constant (e.g. always asking for the first 10 elements with LIMIT), you can consider it O(log(N)).", + "DocFlags": "Deprecated", + "ReplacedBy": "\u0060ZRANGE\u0060 with the \u0060REV\u0060 and \u0060BYSCORE\u0060 arguments", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MAX", + "DisplayText": "max", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MIN", + "DisplayText": "min", + "Type": "Double" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORES", + "DisplayText": "withscores", + "Type": "PureToken", + "Token": "WITHSCORES", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "LIMIT", + "Type": "Block", + "Token": "LIMIT", + "ArgumentFlags": "Optional", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFFSET", + "DisplayText": "offset", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer" + } + ] + } + ] + }, + { + 
"Command": "ZREVRANK", + "Name": "ZREVRANK", + "Summary": "Returns the index of a member in a sorted set ordered by descending scores.", + "Group": "SortedSet", + "Complexity": "O(log(N))", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "WITHSCORE", + "DisplayText": "withscore", + "Type": "PureToken", + "Token": "WITHSCORE", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ZSCAN", + "Name": "ZSCAN", + "Summary": "Iterates over members and scores of a sorted set.", + "Group": "SortedSet", + "Complexity": "O(1) for every call. O(N) for a complete iteration, including enough command calls for the cursor to return back to 0. N is the number of elements inside the collection.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CURSOR", + "DisplayText": "cursor", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "Token": "MATCH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "ZSCORE", + "Name": "ZSCORE", + "Summary": "Returns the score of a member in a sorted set.", + "Group": "SortedSet", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": 
"RespCommandBasicArgument", + "Name": "MEMBER", + "DisplayText": "member", + "Type": "String" + } + ] + } +] \ No newline at end of file diff --git a/libs/server/Resp/RespCommandsInfo.json b/libs/resources/RespCommandsInfo.json similarity index 71% rename from libs/server/Resp/RespCommandsInfo.json rename to libs/resources/RespCommandsInfo.json index f4a830a00d..a46327039a 100644 --- a/libs/server/Resp/RespCommandsInfo.json +++ b/libs/resources/RespCommandsInfo.json @@ -2,141 +2,88 @@ { "Command": "ACL", "Name": "ACL", - "IsInternal": false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, "SubCommands": [ { - "Command": "ACL", + "Command": "ACL_CAT", "Name": "ACL|CAT", - "IsInternal": false, "Arity": -2, "Flags": "Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow" }, { - "Command": "ACL", + "Command": "ACL_DELUSER", "Name": "ACL|DELUSER", - "IsInternal": false, "Arity": -3, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "Tips": [ + "request_policy:all_nodes", + "response_policy:all_succeeded" + ] }, { - "Command": "ACL", + "Command": "ACL_LIST", "Name": "ACL|LIST", - "IsInternal": false, "Arity": 2, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "ACL", + "Command": "ACL_LOAD", "Name": "ACL|LOAD", - "IsInternal": false, "Arity": 2, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", 
- "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "ACL", + "Command": "ACL_SAVE", "Name": "ACL|SAVE", - "IsInternal": false, "Arity": 2, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "Tips": [ + "request_policy:all_nodes", + "response_policy:all_succeeded" + ] }, { - "Command": "ACL", + "Command": "ACL_SETUSER", "Name": "ACL|SETUSER", - "IsInternal": false, "Arity": -3, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "Tips": [ + "request_policy:all_nodes", + "response_policy:all_succeeded" + ] }, { - "Command": "ACL", + "Command": "ACL_USERS", "Name": "ACL|USERS", - "IsInternal": false, "Arity": 2, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "ACL", + "Command": "ACL_WHOAMI", "Name": "ACL|WHOAMI", - "IsInternal": false, "Arity": 2, "Flags": "Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow" } ] }, { "Command": "APPEND", "Name": "APPEND", - "IsInternal": false, "Arity": 3, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -149,79 +96,47 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "ASKING", "Name": "ASKING", - "IsInternal": false, 
"Arity": 1, "Flags": "Fast", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "ASYNC", "Name": "ASYNC", - "IsInternal": false, "Arity": 1, "Flags": "NoMulti, NoScript, ReadOnly", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow" }, { "Command": "AUTH", "Name": "AUTH", - "IsInternal": false, "Arity": -2, "Flags": "Fast, Loading, NoAuth, NoScript, Stale, AllowBusy", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "BGSAVE", "Name": "BGSAVE", - "IsInternal": false, "Arity": -1, "Flags": "Admin, NoAsyncLoading, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { "Command": "BITCOUNT", "Name": "BITCOUNT", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Bitmap, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -234,23 +149,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "BITFIELD", "Name": "BITFIELD", - "IsInternal": false, "Arity": -2, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Bitmap, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -266,20 +177,17 @@ "Notes": "This command allows both access and modification of the key", "Flags": "RW, Access, Update, VariableFlags" } - ], - "SubCommands": null + ] }, { "Command": "BITFIELD_RO", "Name": "BITFIELD_RO", - 
"IsInternal": false, "Arity": -2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Bitmap, Fast, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -292,23 +200,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "BITOP", "Name": "BITOP", - "IsInternal": false, "Arity": -4, "Flags": "DenyOom, Write", "FirstKey": 2, "LastKey": -1, "Step": 1, "AclCategories": "Bitmap, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -321,7 +225,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "OW, Update" }, { @@ -335,23 +238,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "BITPOS", "Name": "BITPOS", - "IsInternal": false, "Arity": -3, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Bitmap, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -364,23 +263,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "BLMOVE", "Name": "BLMOVE", - "IsInternal": false, "Arity": 6, - "Flags": "Blocking, DenyOom, NoScript, Write", + "Flags": "Blocking, DenyOom, Write", "FirstKey": 1, "LastKey": 2, "Step": 1, "AclCategories": "Blocking, List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -393,7 +288,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" }, { @@ -407,23 +301,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "BLPOP", "Name": "BLPOP", - "IsInternal": false, "Arity": -3, - "Flags": "Blocking, NoScript, Write", + "Flags": "Blocking, Write", "FirstKey": 1, "LastKey": -2, "Step": 1, "AclCategories": "Blocking, List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -436,23 +326,19 @@ 
"KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { "Command": "BRPOP", "Name": "BRPOP", - "IsInternal": false, "Arity": -3, - "Flags": "Blocking, NoScript, Write", + "Flags": "Blocking, Write", "FirstKey": 1, "LastKey": -2, "Step": 1, "AclCategories": "Blocking, List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -465,834 +351,519 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { "Command": "CLIENT", "Name": "CLIENT", - "IsInternal": false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "SubCommands": [ + { + "Command": "CLIENT_ID", + "Name": "CLIENT|ID", + "Arity": 2, + "Flags": "Loading, NoScript, Stale", + "AclCategories": "Connection, Slow" + }, + { + "Command": "CLIENT_INFO", + "Name": "CLIENT|INFO", + "Arity": 2, + "Flags": "Loading, NoScript, Stale", + "AclCategories": "Connection, Slow", + "Tips": [ + "nondeterministic_output" + ] + }, + { + "Command": "CLIENT_KILL", + "Name": "CLIENT|KILL", + "Arity": -3, + "Flags": "Admin, Loading, NoScript, Stale", + "AclCategories": "Admin, Connection, Dangerous, Slow" + }, + { + "Command": "CLIENT_LIST", + "Name": "CLIENT|LIST", + "Arity": -2, + "Flags": "Admin, Loading, NoScript, Stale", + "AclCategories": "Admin, Connection, Dangerous, Slow", + "Tips": [ + "nondeterministic_output" + ] + } + ] }, { "Command": "CLUSTER", "Name": "CLUSTER", - "IsInternal": false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, "SubCommands": [ { - "Command": "CLUSTER", + "Command": "CLUSTER_AOFSYNC", "Name": "CLUSTER|AOFSYNC", "IsInternal": true, "Arity": 3, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": 
"Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_APPENDLOG", "Name": "CLUSTER|APPENDLOG", "IsInternal": true, "Arity": 6, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_BANLIST", "Name": "CLUSTER|BANLIST", "IsInternal": true, "Arity": 1, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_BEGIN_REPLICA_RECOVER", "Name": "CLUSTER|BEGIN_REPLICA_RECOVER", "IsInternal": true, "Arity": 8, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_DELKEYSINSLOT", "Name": "CLUSTER|DELKEYSINSLOT", "IsInternal": true, "Arity": 2, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_DELKEYSINSLOTRANGE", "Name": "CLUSTER|DELKEYSINSLOTRANGE", "IsInternal": true, "Arity": -3, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - 
"KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_ENDPOINT", "Name": "CLUSTER|ENDPOINT", "IsInternal": true, "Arity": 2, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_FAILSTOPWRITES", "Name": "CLUSTER|FAILSTOPWRITES", "IsInternal": true, "Arity": 2, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_FAILREPLICATIONOFFSET", "Name": "CLUSTER|FAILREPLICATIONOFFSET", "IsInternal": true, "Arity": 2, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_GOSSIP", "Name": "CLUSTER|GOSSIP", "IsInternal": true, "Arity": -2, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_HELP", "Name": "CLUSTER|HELP", - "IsInternal": false, "Arity": 1, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Slow, Garnet" }, { 
- "Command": "CLUSTER", + "Command": "CLUSTER_INITIATE_REPLICA_SYNC", "Name": "CLUSTER|INITIATE_REPLICA_SYNC", "IsInternal": true, "Arity": 6, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MIGRATE", "Name": "CLUSTER|MIGRATE", "IsInternal": true, "Arity": 4, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MTASKS", "Name": "CLUSTER|MTASKS", "IsInternal": true, "Arity": 1, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MYPARENTID", "Name": "CLUSTER|MYPARENTID", "IsInternal": true, "Arity": 1, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SEND_CKPT_FILE_SEGMENT", "Name": "CLUSTER|SEND_CKPT_FILE_SEGMENT", "IsInternal": true, "Arity": 6, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SEND_CKPT_METADATA", "Name": 
"CLUSTER|SEND_CKPT_METADATA", "IsInternal": true, "Arity": 4, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SETSLOTSRANGE", "Name": "CLUSTER|SETSLOTSRANGE", "IsInternal": true, "Arity": -4, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SLOTSTATE", "Name": "CLUSTER|SLOTSTATE", "IsInternal": true, "Arity": 1, "Flags": "Admin, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow, Garnet" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_ADDSLOTS", "Name": "CLUSTER|ADDSLOTS", - "IsInternal": false, "Arity": -3, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_ADDSLOTSRANGE", "Name": "CLUSTER|ADDSLOTSRANGE", - "IsInternal": false, "Arity": -4, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_BUMPEPOCH", "Name": "CLUSTER|BUMPEPOCH", - "IsInternal": false, "Arity": 2, "Flags": "Admin, NoAsyncLoading, Stale", 
- "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { - "Command": "CLUSTER", + "Command": "CLUSTER_COUNTKEYSINSLOT", "Name": "CLUSTER|COUNTKEYSINSLOT", - "IsInternal": false, "Arity": 3, "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_DELSLOTS", "Name": "CLUSTER|DELSLOTS", - "IsInternal": false, "Arity": -3, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_DELSLOTSRANGE", "Name": "CLUSTER|DELSLOTSRANGE", - "IsInternal": false, "Arity": -4, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_FAILOVER", "Name": "CLUSTER|FAILOVER", - "IsInternal": false, "Arity": -2, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_FORGET", "Name": "CLUSTER|FORGET", - "IsInternal": false, "Arity": 3, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - 
"Command": "CLUSTER", + "Command": "CLUSTER_GETKEYSINSLOT", "Name": "CLUSTER|GETKEYSINSLOT", - "IsInternal": false, "Arity": 4, "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { - "Command": "CLUSTER", + "Command": "CLUSTER_INFO", "Name": "CLUSTER|INFO", - "IsInternal": false, "Arity": 2, "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { - "Command": "CLUSTER", + "Command": "CLUSTER_KEYSLOT", "Name": "CLUSTER|KEYSLOT", - "IsInternal": false, "Arity": 3, "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MEET", "Name": "CLUSTER|MEET", - "IsInternal": false, "Arity": -4, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MYID", "Name": "CLUSTER|MYID", - "IsInternal": false, "Arity": 2, "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_NODES", "Name": "CLUSTER|NODES", - "IsInternal": false, "Arity": 2, "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { - "Command": "CLUSTER", + "Command": "CLUSTER_REPLICAS", "Name": "CLUSTER|REPLICAS", - "IsInternal": 
false, "Arity": 3, "Flags": "Admin, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { - "Command": "CLUSTER", + "Command": "CLUSTER_REPLICATE", "Name": "CLUSTER|REPLICATE", - "IsInternal": false, "Arity": 3, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_RESET", "Name": "CLUSTER|RESET", - "IsInternal": false, "Arity": -2, "Flags": "Admin, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SETCONFIGEPOCH", "Name": "CLUSTER|SET-CONFIG-EPOCH", - "IsInternal": false, "Arity": 3, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SETSLOT", "Name": "CLUSTER|SETSLOT", - "IsInternal": false, "Arity": -4, "Flags": "Admin, NoAsyncLoading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SHARDS", "Name": "CLUSTER|SHARDS", - "IsInternal": false, "Arity": 2, - "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, + "Flags": "Loading, Stale", "AclCategories": "Slow", "Tips": [ "nondeterministic_output" - ], - 
"KeySpecifications": null, - "SubCommands": null + ] }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SLOTS", "Name": "CLUSTER|SLOTS", - "IsInternal": false, "Arity": 2, - "Flags": "Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, + "Flags": "Loading, Stale", "AclCategories": "Slow", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] } ] }, { "Command": "COMMAND", "Name": "COMMAND", - "IsInternal": false, "Arity": -1, "Flags": "Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Connection, Slow", "Tips": [ "nondeterministic_output_order" ], - "KeySpecifications": null, "SubCommands": [ { - "Command": "COMMAND", + "Command": "COMMAND_COUNT", "Name": "COMMAND|COUNT", - "IsInternal": false, "Arity": 2, "Flags": "Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, + "AclCategories": "Connection, Slow" + }, + { + "Command": "COMMAND_DOCS", + "Name": "COMMAND|DOCS", + "Arity": -2, + "Flags": "Loading, Stale", "AclCategories": "Connection, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "Tips": [ + "nondeterministic_output_order" + ] }, { - "Command": "COMMAND", + "Command": "COMMAND_INFO", "Name": "COMMAND|INFO", - "IsInternal": false, "Arity": -2, "Flags": "Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Connection, Slow", "Tips": [ "nondeterministic_output_order" - ], - "KeySpecifications": null, - "SubCommands": null + ] } ] }, { "Command": "COMMITAOF", "Name": "COMMITAOF", - "IsInternal": false, "Arity": -1, "Flags": "Admin, NoMulti, NoScript, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, - "AclCategories": "Admin, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Garnet" }, { "Command": "CONFIG", "Name": "CONFIG", - "IsInternal": false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", - "Tips": null, - 
"KeySpecifications": null, "SubCommands": [ { - "Command": "CONFIG", + "Command": "CONFIG_GET", "Name": "CONFIG|GET", - "IsInternal": false, "Arity": -3, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { - "Command": "CONFIG", + "Command": "CONFIG_REWRITE", "Name": "CONFIG|REWRITE", - "IsInternal": false, "Arity": 2, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "Tips": [ + "request_policy:all_nodes", + "response_policy:all_succeeded" + ] }, { - "Command": "CONFIG", + "Command": "CONFIG_SET", "Name": "CONFIG|SET", - "IsInternal": false, "Arity": -4, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", "Tips": [ "request_policy:all_nodes", "response_policy:all_succeeded" - ], - "KeySpecifications": null, - "SubCommands": null + ] } ] }, { "Command": "COSCAN", "Name": "COSCAN", - "IsInternal": false, "Arity": -3, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, - "AclCategories": "Read, Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Read, Slow, Garnet" }, { "Command": "CustomObjCmd", "Name": "CustomObjCmd", - "IsInternal": true, "Arity": 1, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Dangerous, Slow, Garnet, Custom", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Dangerous, Slow, Garnet, Custom" }, { "Command": "CustomProcedure", "Name": "CustomProcedure", - "IsInternal": true, "Arity": 1, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Dangerous, Slow, 
Garnet, Custom", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Dangerous, Slow, Garnet, Custom" }, { "Command": "CustomRawStringCmd", "Name": "CustomRawStringCmd", - "IsInternal": true, "Arity": 1, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Dangerous, Slow, Garnet, Custom", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Dangerous, Slow, Garnet, Custom" }, { "Command": "CustomTxn", "Name": "CustomTxn", - "IsInternal": true, "Arity": 1, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Dangerous, Slow, Garnet, Custom", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Dangerous, Slow, Garnet, Custom" }, { "Command": "DBSIZE", "Name": "DBSIZE", - "IsInternal": false, "Arity": 1, "Flags": "Fast, ReadOnly", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Fast, KeySpace, Read", "Tips": [ "request_policy:all_shards", "response_policy:agg_sum" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { "Command": "DECR", "Name": "DECR", - "IsInternal": false, "Arity": 2, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1305,23 +876,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "DECRBY", "Name": "DECRBY", - "IsInternal": false, "Arity": 3, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1334,16 +901,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "DEL", "Name": "DEL", - "IsInternal": false, "Arity": -2, "Flags": "Write", "FirstKey": 1, @@ -1366,51 
+930,30 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RM, Delete" } - ], - "SubCommands": null + ] }, { "Command": "DISCARD", "Name": "DISCARD", - "IsInternal": false, "Arity": 1, "Flags": "Fast, Loading, NoScript, Stale, AllowBusy", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Fast, Transaction", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Fast, Transaction" }, { "Command": "ECHO", "Name": "ECHO", - "IsInternal": false, "Arity": 2, "Flags": "Fast, Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "EVAL", "Name": "EVAL", - "IsInternal": false, "Arity": -3, "Flags": "MovableKeys, NoMandatoryKeys, NoScript, SkipMonitor, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Scripting, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1426,20 +969,14 @@ "Notes": "We cannot tell how the keys will be used so we assume the worst, RW and UPDATE", "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "EVALSHA", "Name": "EVALSHA", - "IsInternal": false, "Arity": -3, "Flags": "MovableKeys, NoMandatoryKeys, NoScript, SkipMonitor, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Scripting, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1452,30 +989,20 @@ "FirstKey": 1, "KeyStep": 1 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "EXEC", "Name": "EXEC", - "IsInternal": false, "Arity": 1, "Flags": "Loading, NoScript, SkipSlowLog, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow, Transaction", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow, Transaction" }, { "Command": "EXISTS", "Name": "EXISTS", - 
"IsInternal": false, "Arity": -2, "Flags": "Fast, ReadOnly", "FirstKey": 1, @@ -1498,15 +1025,38 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "EXPIRE", "Name": "EXPIRE", + "Arity": -3, + "Flags": "Fast, Write", + "FirstKey": 1, + "LastKey": 1, + "Step": 1, + "AclCategories": "Fast, KeySpace, Write", + "KeySpecifications": [ + { + "BeginSearch": { + "TypeDiscriminator": "BeginSearchIndex", + "Index": 1 + }, + "FindKeys": { + "TypeDiscriminator": "FindKeysRange", + "LastKey": 0, + "KeyStep": 1, + "Limit": 0 + }, + "Flags": "RW, Update" + } + ] + }, + { + "Command": "EXPIREAT", + "Name": "EXPIREAT", "IsInternal": false, "Arity": -3, "Flags": "Fast, Write", @@ -1534,78 +1084,82 @@ "SubCommands": null }, { - "Command": "FAILOVER", - "Name": "FAILOVER", + "Command": "EXPIRETIME", + "Name": "EXPIRETIME", "IsInternal": false, - "Arity": -1, - "Flags": "Admin, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, + "Arity": 2, + "Flags": "Fast, ReadOnly", + "FirstKey": 1, + "LastKey": 1, + "Step": 1, + "AclCategories": "Fast, KeySpace, Read", + "Tips": null, + "KeySpecifications": [ + { + "BeginSearch": { + "TypeDiscriminator": "BeginSearchIndex", + "Index": 1 + }, + "FindKeys": { + "TypeDiscriminator": "FindKeysRange", + "LastKey": 0, + "KeyStep": 1, + "Limit": 0 + }, + "Notes": null, + "Flags": "RO, Access" + } + ], "SubCommands": null }, + { + "Command": "FAILOVER", + "Name": "FAILOVER", + "Arity": -1, + "Flags": "Admin, NoScript, Stale", + "AclCategories": "Admin, Dangerous, Slow" + }, { "Command": "FLUSHALL", "Name": "FLUSHALL", - "IsInternal": false, "Arity": -1, "Flags": "Write", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Dangerous, KeySpace, Slow, Write", "Tips": [ "request_policy:all_shards", "response_policy:all_succeeded" - ], - "KeySpecifications": null, - "SubCommands": null + 
] }, { "Command": "FLUSHDB", "Name": "FLUSHDB", - "IsInternal": false, "Arity": -1, "Flags": "Write", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Dangerous, KeySpace, Slow, Write", "Tips": [ "request_policy:all_shards", "response_policy:all_succeeded" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { "Command": "FORCEGC", "Name": "FORCEGC", - "IsInternal": false, "Arity": -1, "Flags": "Admin, NoMulti, NoScript, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, - "AclCategories": "Admin, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Garnet" }, { "Command": "GEOADD", "Name": "GEOADD", - "IsInternal": false, "Arity": -5, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Geo, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1618,23 +1172,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" } - ], - "SubCommands": null + ] }, { "Command": "GEODIST", "Name": "GEODIST", - "IsInternal": false, "Arity": -4, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Geo, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1647,23 +1197,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "GEOHASH", "Name": "GEOHASH", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Geo, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1676,23 +1222,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "GEOPOS", "Name": "GEOPOS", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Geo, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1705,23 +1247,19 @@ "KeyStep": 1, 
"Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "GEOSEARCH", "Name": "GEOSEARCH", - "IsInternal": false, "Arity": -7, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Geo, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1734,23 +1272,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "GET", "Name": "GET", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, String", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1763,23 +1297,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "GETBIT", "Name": "GETBIT", - "IsInternal": false, "Arity": 3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Bitmap, Fast, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1792,23 +1322,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "GETDEL", "Name": "GETDEL", - "IsInternal": false, "Arity": 2, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1821,23 +1347,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { "Command": "GETRANGE", "Name": "GETRANGE", - "IsInternal": false, "Arity": 4, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Read, Slow, String", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1850,23 +1372,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "HDEL", "Name": "HDEL", - "IsInternal": false, "Arity": -3, 
"Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1879,37 +1397,26 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "HELLO", "Name": "HELLO", - "IsInternal": false, "Arity": -1, "Flags": "Fast, Loading, NoAuth, NoScript, Stale, AllowBusy", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "HEXISTS", "Name": "HEXISTS", - "IsInternal": false, "Arity": 3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1922,23 +1429,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "HGET", "Name": "HGET", - "IsInternal": false, "Arity": 3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -1951,16 +1454,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "HGETALL", "Name": "HGETALL", - "IsInternal": false, "Arity": 2, "Flags": "ReadOnly", "FirstKey": 1, @@ -1982,23 +1482,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "HINCRBY", "Name": "HINCRBY", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2011,23 +1507,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "HINCRBYFLOAT", "Name": 
"HINCRBYFLOAT", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2040,16 +1532,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "HKEYS", "Name": "HKEYS", - "IsInternal": false, "Arity": 2, "Flags": "ReadOnly", "FirstKey": 1, @@ -2071,23 +1560,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "HLEN", "Name": "HLEN", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2100,23 +1585,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "HMGET", "Name": "HMGET", - "IsInternal": false, "Arity": -3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2129,23 +1610,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "HMSET", "Name": "HMSET", - "IsInternal": false, "Arity": -4, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2158,16 +1635,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" } - ], - "SubCommands": null + ] }, { "Command": "HRANDFIELD", "Name": "HRANDFIELD", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, @@ -2189,16 +1663,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "HSCAN", "Name": "HSCAN", - "IsInternal": false, "Arity": 
-3, "Flags": "ReadOnly", "FirstKey": 1, @@ -2220,23 +1691,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "HSET", "Name": "HSET", - "IsInternal": false, "Arity": -4, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2249,23 +1716,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" } - ], - "SubCommands": null + ] }, { "Command": "HSETNX", "Name": "HSETNX", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2278,23 +1741,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "HSTRLEN", "Name": "HSTRLEN", - "IsInternal": false, "Arity": 3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Hash, Fast, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2307,16 +1766,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "HVALS", "Name": "HVALS", - "IsInternal": false, "Arity": 2, "Flags": "ReadOnly", "FirstKey": 1, @@ -2338,23 +1794,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "INCR", "Name": "INCR", - "IsInternal": false, "Arity": 2, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2367,23 +1819,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "INCRBY", "Name": "INCRBY", - "IsInternal": false, "Arity": 3, "Flags": "DenyOom, Fast, Write", 
"FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2396,138 +1844,89 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "INFO", "Name": "INFO", - "IsInternal": false, "Arity": -1, "Flags": "Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Dangerous, Slow", "Tips": [ "nondeterministic_output", "request_policy:all_shards", "response_policy:special" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { "Command": "KEYS", "Name": "KEYS", - "IsInternal": false, "Arity": 2, "Flags": "ReadOnly", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Dangerous, KeySpace, Read, Slow", "Tips": [ "request_policy:all_shards", "nondeterministic_output_order" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { "Command": "LASTSAVE", "Name": "LASTSAVE", - "IsInternal": false, "Arity": 1, "Flags": "Fast, Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Fast", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { "Command": "LATENCY", "Name": "LATENCY", - "IsInternal": false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, "SubCommands": [ { - "Command": "LATENCY", + "Command": "LATENCY_HELP", "Name": "LATENCY|HELP", - "IsInternal": false, - "Arity": 0, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow, Garnet" }, { - "Command": "LATENCY", + "Command": "LATENCY_HISTOGRAM", "Name": "LATENCY|HISTOGRAM", - "IsInternal": false, "Arity": -2, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, 
- "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", "Tips": [ "nondeterministic_output", "request_policy:all_nodes", "response_policy:special" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { - "Command": "LATENCY", + "Command": "LATENCY_RESET", "Name": "LATENCY|RESET", - "IsInternal": false, "Arity": -2, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Admin, Dangerous, Slow", "Tips": [ "request_policy:all_nodes", - "response_policy:all_succeeded" - ], - "KeySpecifications": null, - "SubCommands": null + "response_policy:agg_sum" + ] } ] }, { "Command": "LINDEX", "Name": "LINDEX", - "IsInternal": false, "Arity": 3, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "List, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2540,23 +1939,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "LINSERT", "Name": "LINSERT", - "IsInternal": false, "Arity": 5, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2569,23 +1964,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "LLEN", "Name": "LLEN", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, List, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2598,23 +1989,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "LMOVE", "Name": "LMOVE", - "IsInternal": false, "Arity": 5, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 2, "Step": 1, "AclCategories": "List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2627,7 +2014,6 @@ "KeyStep": 1, 
"Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" }, { @@ -2641,23 +2027,16 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "LMPOP", "Name": "LMPOP", - "IsInternal": false, - "Arity": 5, - "Flags": "DenyOom, Write", - "FirstKey": 1, - "LastKey": 2, - "Step": 1, + "Arity": -4, + "Flags": "MovableKeys, Write", "AclCategories": "List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2665,18 +2044,29 @@ "Index": 1 }, "FindKeys": { - "TypeDiscriminator": "FindKeysRange", - "LastKey": 0, - "KeyStep": 1, - "Limit": 0 + "TypeDiscriminator": "FindKeysKeyNum", + "KeyNumIdx": 0, + "FirstKey": 1, + "KeyStep": 1 }, - "Notes": null, "Flags": "RW, Access, Delete" - }, + } + ] + }, + { + "Command": "LPOP", + "Name": "LPOP", + "Arity": -2, + "Flags": "Fast, Write", + "FirstKey": 1, + "LastKey": 1, + "Step": 1, + "AclCategories": "Fast, List, Write", + "KeySpecifications": [ { "BeginSearch": { "TypeDiscriminator": "BeginSearchIndex", - "Index": 2 + "Index": 1 }, "FindKeys": { "TypeDiscriminator": "FindKeysRange", @@ -2684,22 +2074,20 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, - "Flags": "RW, Insert" + "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { - "Command": "LPOP", - "Name": "LPOP", + "Command": "LPOS", + "Name": "LPOS", "IsInternal": false, - "Arity": -2, - "Flags": "Fast, Write", + "Arity": -3, + "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, - "AclCategories": "Fast, List, Write", + "AclCategories": "List, Read, Slow", "Tips": null, "KeySpecifications": [ { @@ -2714,7 +2102,7 @@ "Limit": 0 }, "Notes": null, - "Flags": "RW, Access, Delete" + "Flags": "RO, Access" } ], "SubCommands": null @@ -2722,14 +2110,12 @@ { "Command": "LPUSH", "Name": "LPUSH", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, List, Write", - "Tips": null, "KeySpecifications": [ 
{ "BeginSearch": { @@ -2742,23 +2128,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "LPUSHX", "Name": "LPUSHX", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, List, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2771,23 +2153,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "LRANGE", "Name": "LRANGE", - "IsInternal": false, "Arity": 4, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "List, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2800,23 +2178,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "LREM", "Name": "LREM", - "IsInternal": false, "Arity": 4, "Flags": "Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2829,23 +2203,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "LSET", "Name": "LSET", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2858,23 +2228,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" } - ], - "SubCommands": null + ] }, { "Command": "LTRIM", "Name": "LTRIM", - "IsInternal": false, "Arity": 4, "Flags": "Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2887,36 +2253,25 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "MEMORY", "Name": "MEMORY", - 
"IsInternal": false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, "SubCommands": [ { - "Command": "MEMORY", + "Command": "MEMORY_USAGE", "Name": "MEMORY|USAGE", - "IsInternal": false, "Arity": -3, "Flags": "ReadOnly", "FirstKey": 2, "LastKey": 2, "Step": 1, "AclCategories": "Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -2929,18 +2284,15 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] } ] }, { "Command": "MGET", "Name": "MGET", - "IsInternal": false, "Arity": -2, "Flags": "Fast, ReadOnly", "FirstKey": 1, @@ -2962,16 +2314,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "MIGRATE", "Name": "MIGRATE", - "IsInternal": false, "Arity": -6, "Flags": "MovableKeys, Write", "FirstKey": 3, @@ -2993,7 +2342,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" }, { @@ -3008,59 +2356,35 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete, Incomplete" } - ], - "SubCommands": null + ] }, { "Command": "MODULE", "Name": "MODULE", - "IsInternal": false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, "SubCommands": [ { - "Command": "MODULE", + "Command": "MODULE_LOADCS", "Name": "MODULE|LOADCS", - "IsInternal": false, "Arity": -3, "Flags": "Admin, NoAsyncLoading, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" } ] }, { "Command": "MONITOR", "Name": "MONITOR", - "IsInternal": false, "Arity": 1, "Flags": "Admin, Loading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", 
- "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { "Command": "MSET", "Name": "MSET", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Write", "FirstKey": 1, @@ -3083,26 +2407,19 @@ "KeyStep": 2, "Limit": 0 }, - "Notes": null, "Flags": "OW, Update" } - ], - "SubCommands": null + ] }, { "Command": "MSETNX", "Name": "MSETNX", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": -1, "Step": 2, "AclCategories": "Slow, String, Write", - "Tips": [ - "request_policy:multi_shard", - "response_policy:agg_min" - ], "KeySpecifications": [ { "BeginSearch": { @@ -3115,37 +2432,26 @@ "KeyStep": 2, "Limit": 0 }, - "Notes": null, "Flags": "OW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "MULTI", "Name": "MULTI", - "IsInternal": false, "Arity": 1, "Flags": "Fast, Loading, NoScript, Stale, AllowBusy", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Fast, Transaction", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Fast, Transaction" }, { "Command": "PERSIST", "Name": "PERSIST", - "IsInternal": false, "Arity": 2, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, KeySpace, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3158,15 +2464,38 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" } - ], - "SubCommands": null + ] }, { "Command": "PEXPIRE", "Name": "PEXPIRE", + "Arity": -3, + "Flags": "Fast, Write", + "FirstKey": 1, + "LastKey": 1, + "Step": 1, + "AclCategories": "Fast, KeySpace, Write", + "KeySpecifications": [ + { + "BeginSearch": { + "TypeDiscriminator": "BeginSearchIndex", + "Index": 1 + }, + "FindKeys": { + "TypeDiscriminator": "FindKeysRange", + "LastKey": 0, + "KeyStep": 1, + "Limit": 0 + }, + "Flags": "RW, Update" + } + ] + }, + { + "Command": "PEXPIREAT", + "Name": "PEXPIREAT", "IsInternal": false, "Arity": -3, 
"Flags": "Fast, Write", @@ -3193,17 +2522,44 @@ ], "SubCommands": null }, + { + "Command": "PEXPIRETIME", + "Name": "PEXPIRETIME", + "IsInternal": false, + "Arity": 2, + "Flags": "Fast, ReadOnly", + "FirstKey": 1, + "LastKey": 1, + "Step": 1, + "AclCategories": "Fast, KeySpace, Read", + "Tips": null, + "KeySpecifications": [ + { + "BeginSearch": { + "TypeDiscriminator": "BeginSearchIndex", + "Index": 1 + }, + "FindKeys": { + "TypeDiscriminator": "FindKeysRange", + "LastKey": 0, + "KeyStep": 1, + "Limit": 0 + }, + "Notes": null, + "Flags": "RO, Access" + } + ], + "SubCommands": null + }, { "Command": "PFADD", "Name": "PFADD", - "IsInternal": false, "Arity": -2, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "HyperLogLog, Fast, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3216,23 +2572,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "PFCOUNT", "Name": "PFCOUNT", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": -1, "Step": 1, "AclCategories": "HyperLogLog, Read, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3248,20 +2600,17 @@ "Notes": "RW because it may change the internal representation of the key, and propagate to replicas", "Flags": "RW, Access" } - ], - "SubCommands": null + ] }, { "Command": "PFMERGE", "Name": "PFMERGE", - "IsInternal": false, "Arity": -2, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": -1, "Step": 1, "AclCategories": "HyperLogLog, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3274,7 +2623,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Insert" }, { @@ -3288,40 +2636,30 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "PING", "Name": "PING", - "IsInternal": false, "Arity": -1, "Flags": "Fast", - "FirstKey": 0, - "LastKey": 
0, - "Step": 0, "AclCategories": "Connection, Fast", "Tips": [ "request_policy:all_shards", "response_policy:all_succeeded" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { "Command": "PSETEX", "Name": "PSETEX", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Slow, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3334,30 +2672,20 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "OW, Update" } - ], - "SubCommands": null + ] }, { "Command": "PSUBSCRIBE", "Name": "PSUBSCRIBE", - "IsInternal": false, "Arity": -2, "Flags": "Loading, NoScript, PubSub, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "PubSub, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "PubSub, Slow" }, { "Command": "PTTL", "Name": "PTTL", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, @@ -3379,36 +2707,34 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "PUBLISH", "Name": "PUBLISH", - "IsInternal": false, "Arity": 3, "Flags": "Fast, Loading, PubSub, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Fast, PubSub", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Fast, PubSub" }, { "Command": "PUNSUBSCRIBE", "Name": "PUNSUBSCRIBE", - "IsInternal": false, "Arity": -1, "Flags": "Loading, NoScript, PubSub, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "PubSub, Slow", + "AclCategories": "PubSub, Slow" + }, + { + "Command": "PURGEBP", + "Name": "PURGEBP", + "IsInternal": false, + "Arity": 2, + "Flags": "Admin, NoMulti, NoScript, ReadOnly", + "FirstKey": 1, + "LastKey": 1, + "Step": 1, + "AclCategories": "Admin, Garnet", "Tips": null, "KeySpecifications": null, "SubCommands": null @@ -3416,69 +2742,82 @@ { "Command": "QUIT", "Name": 
"QUIT", - "IsInternal": false, "Arity": -1, "Flags": "Fast, Loading, NoAuth, NoScript, Stale, AllowBusy", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "READONLY", "Name": "READONLY", - "IsInternal": false, "Arity": 1, "Flags": "Fast, Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "READWRITE", "Name": "READWRITE", - "IsInternal": false, "Arity": 1, "Flags": "Fast, Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "REGISTERCS", "Name": "REGISTERCS", - "IsInternal": false, "Arity": -5, "Flags": "Admin, NoMulti, NoScript, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, - "AclCategories": "Admin, Dangerous, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Garnet" }, { "Command": "RENAME", "Name": "RENAME", - "IsInternal": false, "Arity": 3, "Flags": "Write", "FirstKey": 1, "LastKey": 2, "Step": 1, "AclCategories": "KeySpace, Slow, Write", + "KeySpecifications": [ + { + "BeginSearch": { + "TypeDiscriminator": "BeginSearchIndex", + "Index": 1 + }, + "FindKeys": { + "TypeDiscriminator": "FindKeysRange", + "LastKey": 0, + "KeyStep": 1, + "Limit": 0 + }, + "Flags": "RW, Access, Delete" + }, + { + "BeginSearch": { + "TypeDiscriminator": "BeginSearchIndex", + "Index": 2 + }, + "FindKeys": { + "TypeDiscriminator": "FindKeysRange", + "LastKey": 0, + "KeyStep": 1, + "Limit": 0 + }, + "Flags": "OW, Update" + } + ] + }, + { + "Command": "RENAMENX", + "Name": "RENAMENX", + "IsInternal": false, + "Arity": 3, + 
"Flags": "Fast, Write", + "FirstKey": 1, + "LastKey": 2, + "Step": 1, + "AclCategories": "Fast, KeySpace, Write", "Tips": null, "KeySpecifications": [ { @@ -3507,7 +2846,7 @@ "Limit": 0 }, "Notes": null, - "Flags": "OW, Update" + "Flags": "OW, Insert" } ], "SubCommands": null @@ -3515,28 +2854,19 @@ { "Command": "REPLICAOF", "Name": "REPLICAOF", - "IsInternal": false, "Arity": 3, "Flags": "Admin, NoAsyncLoading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { "Command": "RPOP", "Name": "RPOP", - "IsInternal": false, "Arity": -2, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, List, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3549,23 +2879,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { "Command": "RPOPLPUSH", "Name": "RPOPLPUSH", - "IsInternal": false, "Arity": 3, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 2, "Step": 1, "AclCategories": "List, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3578,7 +2904,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" }, { @@ -3592,23 +2917,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "RPUSH", "Name": "RPUSH", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, List, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3621,23 +2942,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "RPUSHX", "Name": "RPUSHX", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 
1, "AclCategories": "Fast, List, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3650,37 +2967,29 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "RUNTXP", "Name": "RUNTXP", - "IsInternal": false, "Arity": -2, "Flags": "NoMulti, NoScript", "FirstKey": 1, "LastKey": 1, "Step": 1, - "AclCategories": "Transaction, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Transaction, Garnet" }, { "Command": "SADD", "Name": "SADD", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Set, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3693,54 +3002,38 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "SAVE", "Name": "SAVE", - "IsInternal": false, "Arity": 1, "Flags": "Admin, NoAsyncLoading, NoMulti, NoScript", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { "Command": "SCAN", "Name": "SCAN", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "KeySpace, Read, Slow", "Tips": [ "nondeterministic_output", - "request_policy:special" - ], - "KeySpecifications": null, - "SubCommands": null + "request_policy:special", + "response_policy:special" + ] }, { "Command": "SCARD", "Name": "SCARD", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, Set", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3753,30 +3046,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "SCRIPT", "Name": "SCRIPT", - "IsInternal": 
false, "Arity": -2, - "Flags": "None", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Slow" }, { "Command": "SDIFF", "Name": "SDIFF", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, @@ -3798,23 +3080,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "SDIFFSTORE", "Name": "SDIFFSTORE", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": -1, "Step": 1, "AclCategories": "Set, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3827,7 +3105,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "OW, Update" }, { @@ -3841,51 +3118,33 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "SECONDARYOF", "Name": "SECONDARYOF", - "IsInternal": false, "Arity": 3, "Flags": "Admin, NoAsyncLoading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { "Command": "SELECT", "Name": "SELECT", - "IsInternal": false, "Arity": 2, "Flags": "Fast, Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Connection, Fast", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Connection, Fast" }, { "Command": "SET", "Name": "SET", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Slow, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3901,20 +3160,17 @@ "Notes": "RW and ACCESS due to the optional \u0060GET\u0060 argument", "Flags": "RW, Access, Update, VariableFlags" } - ], - "SubCommands": null + ] }, { "Command": "SETBIT", 
"Name": "SETBIT", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Bitmap, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3927,23 +3183,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "SETEX", "Name": "SETEX", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Slow, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3956,23 +3208,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "OW, Update" } - ], - "SubCommands": null + ] }, { "Command": "SETRANGE", "Name": "SETRANGE", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Slow, String, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -3985,16 +3233,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" } - ], - "SubCommands": null + ] }, { "Command": "SINTER", "Name": "SINTER", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, @@ -4016,23 +3261,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "SINTERSTORE", "Name": "SINTERSTORE", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": -1, "Step": 1, "AclCategories": "Set, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4045,7 +3286,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" }, { @@ -4059,23 +3299,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "SISMEMBER", "Name": "SISMEMBER", - "IsInternal": false, "Arity": 3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, Set", - "Tips": 
null, "KeySpecifications": [ { "BeginSearch": { @@ -4088,30 +3324,20 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "SECONDARYOF", "Name": "SLAVEOF", - "IsInternal": false, "Arity": 3, "Flags": "Admin, NoAsyncLoading, NoScript, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Admin, Dangerous, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Admin, Dangerous, Slow" }, { "Command": "SMEMBERS", "Name": "SMEMBERS", - "IsInternal": false, "Arity": 2, "Flags": "ReadOnly", "FirstKey": 1, @@ -4133,23 +3359,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "SMOVE", "Name": "SMOVE", - "IsInternal": false, "Arity": 4, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 2, "Step": 1, "AclCategories": "Fast, Set, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4162,7 +3384,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" }, { @@ -4176,16 +3397,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Insert" } - ], - "SubCommands": null + ] }, { "Command": "SPOP", "Name": "SPOP", - "IsInternal": false, "Arity": -2, "Flags": "Fast, Write", "FirstKey": 1, @@ -4207,16 +3425,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { "Command": "SRANDMEMBER", "Name": "SRANDMEMBER", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, @@ -4238,23 +3453,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "SREM", "Name": "SREM", - "IsInternal": false, "Arity": -3, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Set, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4267,16 +3478,13 @@ "KeyStep": 1, "Limit": 0 }, - 
"Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "SSCAN", "Name": "SSCAN", - "IsInternal": false, "Arity": -3, "Flags": "ReadOnly", "FirstKey": 1, @@ -4298,23 +3506,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "STRLEN", "Name": "STRLEN", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, String", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4327,30 +3531,20 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "SUBSCRIBE", "Name": "SUBSCRIBE", - "IsInternal": false, "Arity": -2, "Flags": "Loading, NoScript, PubSub, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "PubSub, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "PubSub, Slow" }, { "Command": "SUNION", "Name": "SUNION", - "IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, @@ -4372,23 +3566,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "SUNIONSTORE", "Name": "SUNIONSTORE", - "IsInternal": false, "Arity": -3, "Flags": "DenyOom, Write", "FirstKey": 1, "LastKey": -1, "Step": 1, "AclCategories": "Set, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4401,7 +3591,6 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "OW, Update" }, { @@ -4415,32 +3604,23 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "TIME", "Name": "TIME", - "IsInternal": false, "Arity": 1, "Flags": "Fast, Loading, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Fast", "Tips": [ "nondeterministic_output" - ], - "KeySpecifications": null, - "SubCommands": null + ] }, { "Command": "TTL", "Name": 
"TTL", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, @@ -4462,23 +3642,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "TYPE", "Name": "TYPE", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, KeySpace, Read", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4491,16 +3667,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "UNLINK", "Name": "UNLINK", - "IsInternal": false, "Arity": -2, "Flags": "Fast, Write", "FirstKey": 1, @@ -4523,51 +3696,33 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RM, Delete" } - ], - "SubCommands": null + ] }, { "Command": "UNSUBSCRIBE", "Name": "UNSUBSCRIBE", - "IsInternal": false, "Arity": -1, "Flags": "Loading, NoScript, PubSub, Stale", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "PubSub, Slow", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "PubSub, Slow" }, { "Command": "UNWATCH", "Name": "UNWATCH", - "IsInternal": false, "Arity": 1, "Flags": "Fast, Loading, NoScript, Stale, AllowBusy", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, - "AclCategories": "Fast, Transaction", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Fast, Transaction" }, { "Command": "WATCH", "Name": "WATCH", - "IsInternal": false, "Arity": -2, "Flags": "Fast, Loading, NoScript, Stale, AllowBusy", "FirstKey": 1, "LastKey": -1, "Step": 1, "AclCategories": "Fast, Transaction", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4580,52 +3735,41 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } ], "SubCommands": [ { - "Command": "WATCH", + "Command": "WATCH_MS", "Name": "WATCH|MS", - "IsInternal": false, "Arity": -3, "Flags": "Fast, Loading, NoScript, Stale, AllowBusy", "FirstKey": 2, 
"LastKey": -2, "Step": 1, - "AclCategories": "Fast, Transaction, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Fast, Transaction, Garnet" }, { - "Command": "WATCH", + "Command": "WATCH_OS", "Name": "WATCH|OS", - "IsInternal": false, "Arity": -3, "Flags": "Fast, Loading, NoScript, Stale, AllowBusy", "FirstKey": 2, "LastKey": -2, "Step": 1, - "AclCategories": "Fast, Transaction, Garnet", - "Tips": null, - "KeySpecifications": null, - "SubCommands": null + "AclCategories": "Fast, Transaction, Garnet" } ] }, { "Command": "ZADD", "Name": "ZADD", - "IsInternal": false, "Arity": -4, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, SortedSet, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4638,23 +3782,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Update" } - ], - "SubCommands": null + ] }, { "Command": "ZCARD", "Name": "ZCARD", - "IsInternal": false, "Arity": 2, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, SortedSet", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4667,23 +3807,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO" } - ], - "SubCommands": null + ] }, { "Command": "ZCOUNT", "Name": "ZCOUNT", - "IsInternal": false, "Arity": 4, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, SortedSet", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4696,23 +3832,16 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZDIFF", "Name": "ZDIFF", - "IsInternal": false, "Arity": -3, "Flags": "MovableKeys, ReadOnly", - "FirstKey": 0, - "LastKey": 0, - "Step": 0, "AclCategories": "Read, SortedSet, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4725,23 +3854,19 @@ "FirstKey": 1, "KeyStep": 1 }, - "Notes": null, "Flags": 
"RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZINCRBY", "Name": "ZINCRBY", - "IsInternal": false, "Arity": 4, "Flags": "DenyOom, Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, SortedSet, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4754,23 +3879,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Update" } - ], - "SubCommands": null + ] }, { "Command": "ZLEXCOUNT", "Name": "ZLEXCOUNT", - "IsInternal": false, "Arity": 4, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, SortedSet", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4783,23 +3904,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZMSCORE", "Name": "ZMSCORE", - "IsInternal": false, "Arity": -3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, SortedSet", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4812,23 +3929,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZPOPMAX", "Name": "ZPOPMAX", - "IsInternal": false, "Arity": -2, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, SortedSet, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4841,23 +3954,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { "Command": "ZPOPMIN", "Name": "ZPOPMIN", - "IsInternal": false, "Arity": -2, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, SortedSet, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4870,16 +3979,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Access, Delete" } - ], - "SubCommands": null + ] }, { "Command": "ZRANDMEMBER", "Name": "ZRANDMEMBER", - 
"IsInternal": false, "Arity": -2, "Flags": "ReadOnly", "FirstKey": 1, @@ -4901,23 +4007,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZRANGE", "Name": "ZRANGE", - "IsInternal": false, "Arity": -4, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Read, SortedSet, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4930,23 +4032,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZRANGEBYSCORE", "Name": "ZRANGEBYSCORE", - "IsInternal": false, "Arity": -4, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Read, SortedSet, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4959,23 +4057,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZRANK", "Name": "ZRANK", - "IsInternal": false, - "Arity": 3, + "Arity": -3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, SortedSet", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -4988,23 +4082,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZREM", "Name": "ZREM", - "IsInternal": false, "Arity": -3, "Flags": "Fast, Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, SortedSet, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5017,23 +4107,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "ZREMRANGEBYLEX", "Name": "ZREMRANGEBYLEX", - "IsInternal": false, "Arity": 4, "Flags": "Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "SortedSet, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5046,23 +4132,19 @@ "KeyStep": 1, "Limit": 0 }, - 
"Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "ZREMRANGEBYRANK", "Name": "ZREMRANGEBYRANK", - "IsInternal": false, "Arity": 4, "Flags": "Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "SortedSet, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5075,23 +4157,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "ZREMRANGEBYSCORE", "Name": "ZREMRANGEBYSCORE", - "IsInternal": false, "Arity": 4, "Flags": "Write", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "SortedSet, Slow, Write", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5104,23 +4182,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RW, Delete" } - ], - "SubCommands": null + ] }, { "Command": "ZREVRANGE", "Name": "ZREVRANGE", - "IsInternal": false, "Arity": -4, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Read, SortedSet, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5133,23 +4207,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZREVRANGEBYSCORE", "Name": "ZREVRANGEBYSCORE", - "IsInternal": false, "Arity": -4, "Flags": "ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Read, SortedSet, Slow", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5162,23 +4232,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZREVRANK", "Name": "ZREVRANK", - "IsInternal": false, - "Arity": 3, + "Arity": -3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, SortedSet", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5191,16 +4257,13 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": 
"ZSCAN", "Name": "ZSCAN", - "IsInternal": false, "Arity": -3, "Flags": "ReadOnly", "FirstKey": 1, @@ -5222,23 +4285,19 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] }, { "Command": "ZSCORE", "Name": "ZSCORE", - "IsInternal": false, "Arity": 3, "Flags": "Fast, ReadOnly", "FirstKey": 1, "LastKey": 1, "Step": 1, "AclCategories": "Fast, Read, SortedSet", - "Tips": null, "KeySpecifications": [ { "BeginSearch": { @@ -5251,10 +4310,8 @@ "KeyStep": 1, "Limit": 0 }, - "Notes": null, "Flags": "RO, Access" } - ], - "SubCommands": null + ] } ] \ No newline at end of file diff --git a/libs/server/ACL/ACLParser.cs b/libs/server/ACL/ACLParser.cs index adc425bc53..f033f3042a 100644 --- a/libs/server/ACL/ACLParser.cs +++ b/libs/server/ACL/ACLParser.cs @@ -276,7 +276,7 @@ static bool TryParseCommandForAcl(string commandName, out RespCommand command) throw new ACLException($"Couldn't load information for {command}, shouldn't be possible"); } - if (info.SubCommand != command) + if (info.Command != command) { return false; } diff --git a/libs/server/ACL/User.cs b/libs/server/ACL/User.cs index cc722907ba..699fc0ff5f 100644 --- a/libs/server/ACL/User.cs +++ b/libs/server/ACL/User.cs @@ -425,13 +425,11 @@ public string DescribeUser() /// internal static IEnumerable DetermineCommandDetails(IReadOnlyList infos) { - for (int i = 0; i < infos.Count; i++) + foreach (var info in infos) { - RespCommandsInfo info = infos[i]; - if (info.Parent != null) { - yield return info.SubCommand.Value; + yield return info.Command; } else { @@ -439,9 +437,9 @@ internal static IEnumerable DetermineCommandDetails(IReadOnlyList + public GarnetStatus EXPIRETIME(ref SpanByte key, StoreType storeType, ref SpanByteAndMemory output) + => storageSession.EXPIRETIME(ref key, storeType, ref output, ref context, ref objectContext); + + /// + public GarnetStatus PEXPIRETIME(ref SpanByte key, StoreType storeType, ref SpanByteAndMemory output) + => 
storageSession.EXPIRETIME(ref key, storeType, ref output, ref context, ref objectContext, milliseconds: true); + + #endregion + #region SET /// public GarnetStatus SET(ref SpanByte key, ref SpanByte value) @@ -154,6 +166,10 @@ public GarnetStatus APPEND(ArgSlice key, ArgSlice value, ref ArgSlice output) /// public GarnetStatus RENAME(ArgSlice oldKey, ArgSlice newKey, StoreType storeType = StoreType.All) => storageSession.RENAME(oldKey, newKey, storeType); + + /// + public GarnetStatus RENAMENX(ArgSlice oldKey, ArgSlice newKey, out int result, StoreType storeType = StoreType.All) + => storageSession.RENAMENX(oldKey, newKey, storeType, out result); #endregion #region EXISTS @@ -177,6 +193,18 @@ public GarnetStatus PEXPIRE(ArgSlice key, TimeSpan expiry, out bool timeoutSet, #endregion + #region EXPIREAT + + /// + public GarnetStatus EXPIREAT(ArgSlice key, long expiryTimestamp, out bool timeoutSet, StoreType storeType = StoreType.All, ExpireOption expireOption = ExpireOption.None) + => storageSession.EXPIREAT(key, expiryTimestamp, out timeoutSet, storeType, expireOption, ref context, ref objectContext); + + /// + public GarnetStatus PEXPIREAT(ArgSlice key, long expiryTimestamp, out bool timeoutSet, StoreType storeType = StoreType.All, ExpireOption expireOption = ExpireOption.None) + => storageSession.EXPIREAT(key, expiryTimestamp, out timeoutSet, storeType, expireOption, ref context, ref objectContext, milliseconds: true); + + #endregion + #region PERSIST /// public unsafe GarnetStatus PERSIST(ArgSlice key, StoreType storeType = StoreType.All) diff --git a/libs/server/API/GarnetApiObjectCommands.cs b/libs/server/API/GarnetApiObjectCommands.cs index 208cb8af26..a63d01b176 100644 --- a/libs/server/API/GarnetApiObjectCommands.cs +++ b/libs/server/API/GarnetApiObjectCommands.cs @@ -175,6 +175,10 @@ public GarnetStatus ListLeftPush(ArgSlice key, ArgSlice element, out int count, public GarnetStatus ListLeftPush(byte[] key, ref ObjectInput input, out ObjectOutputHeader 
output) => storageSession.ListPush(key, ref input, out output, ref objectContext); + /// + public GarnetStatus ListPosition(byte[] key, ref ObjectInput input, ref GarnetObjectStoreOutput outputFooter) + => storageSession.ListPosition(key, ref input, ref outputFooter, ref objectContext); + /// public GarnetStatus ListLeftPop(byte[] key, ref ObjectInput input, ref GarnetObjectStoreOutput outputFooter) => storageSession.ListPop(key, ref input, ref outputFooter, ref objectContext); diff --git a/libs/server/API/GarnetWatchApi.cs b/libs/server/API/GarnetWatchApi.cs index bbae63343a..8a4e6044b2 100644 --- a/libs/server/API/GarnetWatchApi.cs +++ b/libs/server/API/GarnetWatchApi.cs @@ -77,6 +77,24 @@ public GarnetStatus PTTL(ref SpanByte key, StoreType storeType, ref SpanByteAndM #endregion + #region EXPIRETIME + + /// + public GarnetStatus EXPIRETIME(ref SpanByte key, StoreType storeType, ref SpanByteAndMemory output) + { + garnetApi.WATCH(new ArgSlice(ref key), storeType); + return garnetApi.EXPIRETIME(ref key, storeType, ref output); + } + + /// + public GarnetStatus PEXPIRETIME(ref SpanByte key, StoreType storeType, ref SpanByteAndMemory output) + { + garnetApi.WATCH(new ArgSlice(ref key), storeType); + return garnetApi.PEXPIRETIME(ref key, storeType, ref output); + } + + #endregion + #region SortedSet Methods /// diff --git a/libs/server/API/IGarnetApi.cs b/libs/server/API/IGarnetApi.cs index 7a84d8603d..53af70fdb2 100644 --- a/libs/server/API/IGarnetApi.cs +++ b/libs/server/API/IGarnetApi.cs @@ -118,6 +118,16 @@ public interface IGarnetApi : IGarnetReadApi, IGarnetAdvancedApi /// /// GarnetStatus RENAME(ArgSlice oldKey, ArgSlice newKey, StoreType storeType = StoreType.All); + + /// + /// Renames key to newkey if newkey does not yet exist. It returns an error when key does not exist. + /// + /// The old key to be renamed. + /// The new key name. + /// The result of the operation. + /// The type of store to perform the operation on. 
+ /// + GarnetStatus RENAMENX(ArgSlice oldKey, ArgSlice newKey, out int result, StoreType storeType = StoreType.All); #endregion #region EXISTS @@ -166,6 +176,32 @@ public interface IGarnetApi : IGarnetReadApi, IGarnetAdvancedApi #endregion + #region EXPIREAT + + /// + /// Set a timeout on key using absolute Unix timestamp (seconds since January 1, 1970) in seconds + /// + /// Key + /// Absolute Unix timestamp in seconds + /// Whether timeout was set by the call + /// Store type: main, object, or both + /// Expire option + /// + GarnetStatus EXPIREAT(ArgSlice key, long expiryTimestamp, out bool timeoutSet, StoreType storeType = StoreType.All, ExpireOption expireOption = ExpireOption.None); + + /// + /// Set a timeout on key using absolute Unix timestamp (seconds since January 1, 1970) in milliseconds + /// + /// Key + /// Absolute Unix timestamp in milliseconds + /// Whether timeout was set by the call + /// Store type: main, object, or both + /// Expire option + /// + GarnetStatus PEXPIREAT(ArgSlice key, long expiryTimestamp, out bool timeoutSet, StoreType storeType = StoreType.All, ExpireOption expireOption = ExpireOption.None); + + #endregion + #region PERSIST /// /// PERSIST @@ -589,6 +625,16 @@ public interface IGarnetApi : IGarnetReadApi, IGarnetAdvancedApi #region ListPush Methods + /// + /// The command returns the index of matching elements inside a Redis list. + /// By default, when no options are given, it will scan the list from head to tail, looking for the first match of "element". + /// + /// + /// + /// + /// + GarnetStatus ListPosition(byte[] key, ref ObjectInput input, ref GarnetObjectStoreOutput outputFooter); + /// /// ListLeftPush ArgSlice version with ObjectOutputHeader output /// @@ -1039,6 +1085,28 @@ public interface IGarnetReadApi #endregion + #region EXPIRETIME + + /// + /// Returns the absolute Unix timestamp (since January 1, 1970) in seconds at which the given key will expire. + /// + /// The key to get the expiration time for. 
+ /// The type of store to retrieve the key from. + /// The output containing the expiration time. + /// The status of the operation. + GarnetStatus EXPIRETIME(ref SpanByte key, StoreType storeType, ref SpanByteAndMemory output); + + /// + /// Returns the absolute Unix timestamp (since January 1, 1970) in milliseconds at which the given key will expire. + /// + /// The key to get the expiration time for. + /// The type of store to retrieve the key from. + /// The output containing the expiration time. + /// The status of the operation. + GarnetStatus PEXPIRETIME(ref SpanByte key, StoreType storeType, ref SpanByteAndMemory output); + + #endregion + #region SortedSet Methods /// diff --git a/libs/server/ArgSlice/ScratchBufferManager.cs b/libs/server/ArgSlice/ScratchBufferManager.cs index c5b33e4573..a3b83d728f 100644 --- a/libs/server/ArgSlice/ScratchBufferManager.cs +++ b/libs/server/ArgSlice/ScratchBufferManager.cs @@ -253,6 +253,7 @@ public ArgSlice FormatCommandAsResp(string cmd, object[] args, Lua state) } scratchBufferOffset = (int)(ptr - scratchBufferHead); } + t.Dispose(); } else if (item is long i) { diff --git a/libs/server/Auth/GarnetACLAuthenticator.cs b/libs/server/Auth/GarnetACLAuthenticator.cs index 32636d6184..05fa6ee01c 100644 --- a/libs/server/Auth/GarnetACLAuthenticator.cs +++ b/libs/server/Auth/GarnetACLAuthenticator.cs @@ -8,7 +8,7 @@ namespace Garnet.server.Auth { - abstract class GarnetACLAuthenticator : IGarnetAuthenticator + public abstract class GarnetACLAuthenticator : IGarnetAuthenticator { /// /// The Access Control List to authenticate users against diff --git a/libs/server/ClientType.cs b/libs/server/ClientType.cs new file mode 100644 index 0000000000..0c0361e1f7 --- /dev/null +++ b/libs/server/ClientType.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using System; + +namespace Garnet.server +{ + /// + /// Type option for CLIENT|LIST and CLIENT|KILL commands. 
+ /// + public enum ClientType : byte + { + /// + /// Default invalid case. + /// + Invalid = 0, + + /// + /// Normal client connections, including MONITOR parked connections. + /// + NORMAL, + /// + /// Connection from a MASTER cluster node to current node. + /// + MASTER, + /// + /// Connection from a REPLICA cluster node to current node. + /// + REPLICA, + /// + /// Connection which is dedicated to listening for PUBLISH data (Resp2 only). + /// + PUBSUB, + /// + /// Equivalent to . + /// + /// Separate value as SLAVE is not permitted on new commands, but is still supported + /// for older commands. + /// + SLAVE, + } + + public static class ClientTypeExtensions + { + /// + /// Validate that the given is legal, and _could_ have come from the given . + /// + /// TODO: Long term we can kill this and use instead of + /// and avoid extra validation. See: https://github.com/dotnet/runtime/issues/81500 . + /// + public static bool IsValid(this ClientType type, ref ArgSlice fromSlice) + { + return type != ClientType.Invalid && Enum.IsDefined(type) && !fromSlice.ReadOnlySpan.ContainsAnyInRange((byte)'0', (byte)'9'); + } + } +} \ No newline at end of file diff --git a/libs/server/Cluster/IClusterProvider.cs b/libs/server/Cluster/IClusterProvider.cs index 210fdc429c..a8cb4d920d 100644 --- a/libs/server/Cluster/IClusterProvider.cs +++ b/libs/server/Cluster/IClusterProvider.cs @@ -43,12 +43,29 @@ public interface IClusterProvider : IDisposable /// MetricsItem[] GetReplicationInfo(); + /// + /// Get buffer poolt stats + /// + /// + MetricsItem[] GetBufferPoolStats(); + + /// + /// Purger buffer pool for provided manager + /// + /// + void PurgeBufferPool(ManagerType managerType); + /// /// Is replica /// /// bool IsReplica(); + /// + /// Returns true if the given nodeId is a replica, according to the current cluster configuration. 
+ /// + bool IsReplica(string nodeId); + /// /// On checkpoint initiated /// diff --git a/libs/server/Cluster/IClusterSession.cs b/libs/server/Cluster/IClusterSession.cs index c4ed266a45..2d80b13070 100644 --- a/libs/server/Cluster/IClusterSession.cs +++ b/libs/server/Cluster/IClusterSession.cs @@ -11,6 +11,11 @@ namespace Garnet.server /// public interface IClusterSession { + /// + /// If the current session is being used by a remote cluster node, the id that was last presented during a GOSSIP message. + /// + string RemoteNodeId { get; } + /// /// Type of session /// diff --git a/libs/server/Custom/CustomCommandManager.cs b/libs/server/Custom/CustomCommandManager.cs index 71f9e35ad5..07fa54224c 100644 --- a/libs/server/Custom/CustomCommandManager.cs +++ b/libs/server/Custom/CustomCommandManager.cs @@ -26,6 +26,7 @@ public class CustomCommandManager internal int CustomCommandsInfoCount => CustomCommandsInfo.Count; internal readonly Dictionary CustomCommandsInfo = new(StringComparer.OrdinalIgnoreCase); + internal readonly Dictionary CustomCommandsDocs = new(StringComparer.OrdinalIgnoreCase); /// /// Create new custom command manager @@ -38,7 +39,7 @@ public CustomCommandManager() customProcedureMap = new CustomProcedureWrapper[MaxRegistrations]; } - internal int Register(string name, CommandType type, CustomRawStringFunctions customFunctions, RespCommandsInfo commandInfo, long expirationTicks) + internal int Register(string name, CommandType type, CustomRawStringFunctions customFunctions, RespCommandsInfo commandInfo, RespCommandDocs commandDocs, long expirationTicks) { int id = Interlocked.Increment(ref RawStringCommandId) - 1; if (id >= MaxRegistrations) @@ -46,10 +47,11 @@ internal int Register(string name, CommandType type, CustomRawStringFunctions cu rawStringCommandMap[id] = new CustomRawStringCommand(name, (byte)id, type, customFunctions, expirationTicks); if (commandInfo != null) CustomCommandsInfo.Add(name, commandInfo); + if (commandDocs != null) 
CustomCommandsDocs.Add(name, commandDocs); return id; } - internal int Register(string name, Func proc, RespCommandsInfo commandInfo = null) + internal int Register(string name, Func proc, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) { int id = Interlocked.Increment(ref TransactionProcId) - 1; if (id >= MaxRegistrations) @@ -57,6 +59,7 @@ internal int Register(string name, Func proc, RespCo transactionProcMap[id] = new CustomTransaction(name, (byte)id, proc); if (commandInfo != null) CustomCommandsInfo.Add(name, commandInfo); + if (commandDocs != null) CustomCommandsDocs.Add(name, commandDocs); return id; } @@ -92,7 +95,7 @@ internal void RegisterType(int objectTypeId, CustomObjectFactory factory) objectCommandMap[objectTypeId] = new CustomObjectCommandWrapper((byte)objectTypeId, factory); } - internal (int objectTypeId, int subCommand) Register(string name, CommandType commandType, CustomObjectFactory factory, RespCommandsInfo commandInfo) + internal (int objectTypeId, int subCommand) Register(string name, CommandType commandType, CustomObjectFactory factory, RespCommandsInfo commandInfo, RespCommandDocs commandDocs) { int objectTypeId = -1; for (int i = 0; i < ObjectTypeId; i++) @@ -116,11 +119,12 @@ internal void RegisterType(int objectTypeId, CustomObjectFactory factory) wrapper.commandMap[subCommand] = new CustomObjectCommand(name, (byte)objectTypeId, (byte)subCommand, commandType, wrapper.factory); if (commandInfo != null) CustomCommandsInfo.Add(name, commandInfo); + if (commandDocs != null) CustomCommandsDocs.Add(name, commandDocs); return (objectTypeId, subCommand); } - internal (int objectTypeId, int subCommand) Register(string name, CommandType commandType, CustomObjectFactory factory, CustomObjectFunctions customObjectFunctions, RespCommandsInfo commandInfo) + internal (int objectTypeId, int subCommand) Register(string name, CommandType commandType, CustomObjectFactory factory, CustomObjectFunctions customObjectFunctions, 
RespCommandsInfo commandInfo, RespCommandDocs commandDocs) { var objectTypeId = -1; for (var i = 0; i < ObjectTypeId; i++) @@ -144,6 +148,7 @@ internal void RegisterType(int objectTypeId, CustomObjectFactory factory) wrapper.commandMap[subCommand] = new CustomObjectCommand(name, (byte)objectTypeId, (byte)subCommand, commandType, wrapper.factory, customObjectFunctions); if (commandInfo != null) CustomCommandsInfo.Add(name, commandInfo); + if (commandDocs != null) CustomCommandsDocs.Add(name, commandDocs); return (objectTypeId, subCommand); } @@ -154,9 +159,10 @@ internal void RegisterType(int objectTypeId, CustomObjectFactory factory) /// /// /// + /// /// /// - internal int Register(string name, CustomProcedure customProcedure, RespCommandsInfo commandInfo = null) + internal int Register(string name, CustomProcedure customProcedure, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) { int id = Interlocked.Increment(ref CustomProcedureId) - 1; if (id >= MaxRegistrations) @@ -164,6 +170,7 @@ internal int Register(string name, CustomProcedure customProcedure, RespCommands customProcedureMap[id] = new CustomProcedureWrapper(name, (byte)id, customProcedure); if (commandInfo != null) CustomCommandsInfo.Add(name, commandInfo); + if (commandDocs != null) CustomCommandsDocs.Add(name, commandDocs); return id; } @@ -225,11 +232,12 @@ internal bool Match(ReadOnlySpan command, out CustomProcedureWrapper cmd) internal bool TryGetCustomCommandInfo(string cmdName, out RespCommandsInfo respCommandsInfo) { - respCommandsInfo = default; - if (!this.CustomCommandsInfo.ContainsKey(cmdName)) return false; + return this.CustomCommandsInfo.TryGetValue(cmdName, out respCommandsInfo); + } - respCommandsInfo = this.CustomCommandsInfo[cmdName]; - return true; + internal bool TryGetCustomCommandDocs(string cmdName, out RespCommandDocs respCommandsDocs) + { + return this.CustomCommandsDocs.TryGetValue(cmdName, out respCommandsDocs); } } } \ No newline at end of file diff 
--git a/libs/server/Custom/CustomCommandRegistration.cs b/libs/server/Custom/CustomCommandRegistration.cs index 4c91e66ab2..ddf7f830a7 100644 --- a/libs/server/Custom/CustomCommandRegistration.cs +++ b/libs/server/Custom/CustomCommandRegistration.cs @@ -26,6 +26,11 @@ internal abstract class RegisterArgsBase /// RESP command info /// public RespCommandsInfo CommandInfo { get; set; } + + /// + /// RESP command docs + /// + public RespCommandDocs CommandDocs { get; set; } } @@ -185,6 +190,7 @@ public override void Register(CustomCommandManager customCommandManager) this.RegisterArgs.CommandType, this.Instance, this.RegisterArgs.CommandInfo, + this.RegisterArgs.CommandDocs, this.RegisterArgs.ExpirationTicks); } } @@ -200,7 +206,12 @@ public RegisterCustomObjectFactoryProvider(CustomObjectFactory instance, Registe public override void Register(CustomCommandManager customCommandManager) { - customCommandManager.Register(this.RegisterArgs.Name, this.RegisterArgs.CommandType, this.Instance, this.RegisterArgs.CommandInfo); + customCommandManager.Register( + this.RegisterArgs.Name, + this.RegisterArgs.CommandType, + this.Instance, + this.RegisterArgs.CommandInfo, + this.RegisterArgs.CommandDocs); } } @@ -218,7 +229,13 @@ public RegisterCustomObjectCommandProvider(CustomObjectFactory instance, Registe public override void Register(CustomCommandManager customCommandManager) { - customCommandManager.Register(RegisterArgs.Name, RegisterArgs.CommandType, factory, RegisterArgs.ObjectCommand, RegisterArgs.CommandInfo); + customCommandManager.Register( + RegisterArgs.Name, + RegisterArgs.CommandType, + factory, + RegisterArgs.ObjectCommand, + RegisterArgs.CommandInfo, + RegisterArgs.CommandDocs); } } @@ -233,7 +250,7 @@ public RegisterCustomTransactionProcedureProvider(CustomTransactionProcedure ins public override void Register(CustomCommandManager customCommandManager) { - customCommandManager.Register(this.RegisterArgs.Name, () => this.Instance, this.RegisterArgs.CommandInfo); + 
customCommandManager.Register(this.RegisterArgs.Name, () => this.Instance, this.RegisterArgs.CommandInfo, this.RegisterArgs.CommandDocs); } } } \ No newline at end of file diff --git a/libs/server/ExpireOption.cs b/libs/server/ExpireOption.cs index 407a8057a3..65b306e342 100644 --- a/libs/server/ExpireOption.cs +++ b/libs/server/ExpireOption.cs @@ -8,44 +8,36 @@ namespace Garnet.server /// /// Expire option /// + [Flags] public enum ExpireOption : byte { /// /// None /// - None, + None = 0, /// /// Set expiry only when the key has no expiry /// - NX, + NX = 1 << 0, /// /// Set expiry only when the key has an existing expiry /// - XX, + XX = 1 << 1, /// /// Set expiry only when the new expiry is greater than current one /// - GT, + GT = 1 << 2, /// /// Set expiry only when the new expiry is less than current one /// - LT - } - - /// - /// Extension methods for . - /// - public static class ExpireOptionExtensions - { + LT = 1 << 3, + /// + /// Set expiry only when the key has an existing expiry and the new expiry is greater than current one + /// + XXGT = XX | GT, /// - /// Validate that the given is legal, and _could_ have come from the given . - /// - /// TODO: Long term we can kill this and use instead of - /// and avoid extra validation. See: https://github.com/dotnet/runtime/issues/81500 . 
+ /// Set expiry only when the key has an existing expiry and the new expiry is less than current one /// - public static bool IsValid(this ExpireOption type, ref ArgSlice fromSlice) - { - return type != ExpireOption.None && Enum.IsDefined(type) && !fromSlice.ReadOnlySpan.ContainsAnyInRange((byte)'0', (byte)'9'); - } + XXLT = XX | LT, } } \ No newline at end of file diff --git a/libs/server/Garnet.server.csproj b/libs/server/Garnet.server.csproj index 596927fcd4..e3abc5b5c2 100644 --- a/libs/server/Garnet.server.csproj +++ b/libs/server/Garnet.server.csproj @@ -7,15 +7,12 @@ true - - - - + diff --git a/libs/server/Lua/LuaCommands.cs b/libs/server/Lua/LuaCommands.cs index 94dd76f027..a0e61368a4 100644 --- a/libs/server/Lua/LuaCommands.cs +++ b/libs/server/Lua/LuaCommands.cs @@ -5,6 +5,7 @@ using System.Text; using Garnet.common; using Microsoft.Extensions.Logging; +using NLua; using NLua.Exceptions; namespace Garnet.server @@ -193,57 +194,114 @@ private unsafe bool ExecuteScript(int count, LuaRunner scriptRunner) { try { - object scriptResult = scriptRunner.Run(count, parseState); - if (scriptResult != null) + var scriptResult = scriptRunner.Run(count, parseState); + WriteObject(scriptResult); + } + catch (LuaScriptException ex) + { + logger?.LogError(ex.InnerException ?? ex, "Error executing Lua script callback"); + while (!RespWriteUtils.WriteError("ERR " + (ex.InnerException ?? ex).Message, ref dcurr, dend)) + SendAndReset(); + return true; + } + catch (Exception ex) + { + logger?.LogError(ex, "Error executing Lua script"); + while (!RespWriteUtils.WriteError("ERR " + ex.Message, ref dcurr, dend)) + SendAndReset(); + return true; + } + return true; + } + + void WriteObject(object scriptResult) + { + if (scriptResult != null) + { + if (scriptResult is string s) { - if (scriptResult is string s) - { - while (!RespWriteUtils.WriteAsciiBulkString(s, ref dcurr, dend)) - SendAndReset(); - } - else if ((scriptResult as byte?) 
!= null && (byte)scriptResult == 36) //equals to $ + while (!RespWriteUtils.WriteAsciiBulkString(s, ref dcurr, dend)) + SendAndReset(); + } + else if ((scriptResult as byte?) != null && (byte)scriptResult == 36) //equals to $ + { + while (!RespWriteUtils.WriteDirect((byte[])scriptResult, ref dcurr, dend)) + SendAndReset(); + } + else if (scriptResult is bool b) + { + if (b) { - while (!RespWriteUtils.WriteDirect((byte[])scriptResult, ref dcurr, dend)) + while (!RespWriteUtils.WriteInteger(1, ref dcurr, dend)) SendAndReset(); } - else if (scriptResult as Int64? != null) + else { - while (!RespWriteUtils.WriteInteger((Int64)scriptResult, ref dcurr, dend)) + while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_ERRNOTFOUND, ref dcurr, dend)) SendAndReset(); } - else if (scriptResult as ArgSlice? != null) + } + else if (scriptResult is long l) + { + while (!RespWriteUtils.WriteInteger(l, ref dcurr, dend)) + SendAndReset(); + } + else if (scriptResult is ArgSlice a) + { + while (!RespWriteUtils.WriteBulkString(a.ReadOnlySpan, ref dcurr, dend)) + SendAndReset(); + } + else if (scriptResult is object[] o) + { + // Two objects one boolean value and the result from the Lua Call + while (!RespWriteUtils.WriteAsciiBulkString(o[1].ToString().AsSpan(), ref dcurr, dend)) + SendAndReset(); + } + else if (scriptResult is LuaTable luaTable) + { + try { - while (!RespWriteUtils.WriteBulkString(((ArgSlice)scriptResult).ToArray(), ref dcurr, dend)) - SendAndReset(); + var retVal = luaTable["err"]; + if (retVal != null) + { + while (!RespWriteUtils.WriteError((string)retVal, ref dcurr, dend)) + SendAndReset(); + } + else + { + retVal = luaTable["ok"]; + if (retVal != null) + { + while (!RespWriteUtils.WriteAsciiBulkString((string)retVal, ref dcurr, dend)) + SendAndReset(); + } + else + { + int count = luaTable.Values.Count; + while (!RespWriteUtils.WriteArrayLength(count, ref dcurr, dend)) + SendAndReset(); + foreach (var value in luaTable.Values) + { + WriteObject(value); + } + } + } } - 
else if (scriptResult as Object[] != null) + finally { - // Two objects one boolean value and the result from the Lua Call - while (!RespWriteUtils.WriteAsciiBulkString((scriptResult as Object[])[1].ToString().AsSpan(), ref dcurr, dend)) - SendAndReset(); + luaTable.Dispose(); } } else { - while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_ERRNOTFOUND, ref dcurr, dend)) - SendAndReset(); + throw new LuaScriptException("Unknown return type", ""); } } - catch (LuaScriptException ex) - { - logger?.LogError(ex.InnerException, "Error executing Lua script callback"); - while (!RespWriteUtils.WriteError("ERR " + ex.InnerException.Message, ref dcurr, dend)) - SendAndReset(); - return true; - } - catch (Exception ex) + else { - logger?.LogError(ex, "Error executing Lua script"); - while (!RespWriteUtils.WriteError("ERR " + ex.Message, ref dcurr, dend)) + while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_ERRNOTFOUND, ref dcurr, dend)) SendAndReset(); - return true; } - return true; } } } \ No newline at end of file diff --git a/libs/server/Lua/LuaRunner.cs b/libs/server/Lua/LuaRunner.cs index 06bc529e0c..ea2ef3f0dd 100644 --- a/libs/server/Lua/LuaRunner.cs +++ b/libs/server/Lua/LuaRunner.cs @@ -2,6 +2,7 @@ // Licensed under the MIT license. 
using System; +using System.Collections.Generic; using System.Text; using Garnet.common; using Microsoft.Extensions.Logging; @@ -22,9 +23,12 @@ internal sealed class LuaRunner : IDisposable readonly Lua state; readonly LuaTable sandbox_env; LuaFunction function; - string[] keys, argv; readonly TxnKeyEntries txnKeyEntries; readonly bool txnMode; + readonly LuaFunction garnetCall; + readonly LuaTable keyTable, argvTable; + int keyLength, argvLength; + Queue disposeQueue; /// /// Creates a new runner with the source of the script @@ -43,18 +47,26 @@ public LuaRunner(string source, bool txnMode = false, RespServerSession respServ if (txnMode) { this.txnKeyEntries = new TxnKeyEntries(16, respServerSession.storageSession.lockableContext, respServerSession.storageSession.objectStoreLockableContext); - state.RegisterFunction("garnet_call", this, this.GetType().GetMethod(nameof(garnet_call_txn))); + garnetCall = state.RegisterFunction("garnet_call", this, this.GetType().GetMethod(nameof(garnet_call_txn))); } else { - state.RegisterFunction("garnet_call", this, this.GetType().GetMethod("garnet_call")); + garnetCall = state.RegisterFunction("garnet_call", this, this.GetType().GetMethod("garnet_call")); } - state.DoString(@" + _ = state.DoString(@" import = function () end redis = {} function redis.call(cmd, ...) return garnet_call(cmd, ...) end + function redis.status_reply(text) + return text + end + function redis.error_reply(text) + return { err = text } + end + KEYS = {} + ARGV = {} sandbox_env = { tostring = tostring; next = next; @@ -65,7 +77,7 @@ function redis.call(cmd, ...) coroutine = coroutine; type = type; select = select; - unpack = unpack; + unpack = table.unpack; gcinfo = gcinfo; pairs = pairs; loadstring = loadstring; @@ -73,6 +85,10 @@ function redis.call(cmd, ...) 
error = error; redis = redis; math = math; + table = table; + string = string; + KEYS = KEYS; + ARGV = ARGV; } function load_sandboxed(source) if (not source) then return nil end @@ -80,6 +96,8 @@ function load_sandboxed(source) end "); sandbox_env = (LuaTable)state["sandbox_env"]; + keyTable = (LuaTable)state["KEYS"]; + argvTable = (LuaTable)state["ARGV"]; } /// @@ -126,9 +144,12 @@ public void Compile() /// public void Dispose() { + garnetCall?.Dispose(); + keyTable?.Dispose(); + argvTable?.Dispose(); + sandbox_env?.Dispose(); function?.Dispose(); state?.Dispose(); - sandbox_env?.Dispose(); } /// @@ -158,8 +179,8 @@ unsafe object ProcessCommandFromScripting(TGarnetApi api, string cmd switch (cmd) { // We special-case a few performance-sensitive operations to directly invoke via the storage API - case "SET": - case "set": + case "SET" when args.Length == 2: + case "set" when args.Length == 2: { if (!respServerSession.CheckACLPermissions(RespCommand.SET)) return Encoding.ASCII.GetString(CmdStrings.RESP_ERR_NOAUTH); @@ -205,8 +226,8 @@ unsafe object ProcessResponse(byte* ptr, int length) return resultStr; break; case (byte)':': - if (RespReadUtils.ReadIntegerAsString(out var resultInt, ref ptr, ptr + length)) - return resultInt; + if (RespReadUtils.Read64Int(out var number, ref ptr, ptr + length)) + return number; break; case (byte)'-': if (RespReadUtils.ReadErrorAsString(out resultStr, ref ptr, ptr + length)) @@ -220,7 +241,20 @@ unsafe object ProcessResponse(byte* ptr, int length) case (byte)'*': if (RespReadUtils.ReadStringArrayResponseWithLengthHeader(out var resultArray, ref ptr, ptr + length)) - return resultArray; + { + // Create return table + var returnValue = (LuaTable)state.DoString("return { }")[0]; + + // Queue up for disposal at the end of the script call + disposeQueue ??= new(); + disposeQueue.Enqueue(returnValue); + + // Populate the table + var i = 1; + foreach (var item in resultArray) + returnValue[i++] = item == null ? 
false : item; + return returnValue; + } break; default: @@ -239,12 +273,10 @@ public object Run(int count, SessionParseState parseState) int offset = 1; int nKeys = parseState.GetInt(offset++); count--; + ResetParameters(nKeys, count - nKeys); - string[] keys = null; if (nKeys > 0) { - // Lua uses 1-based indexing, so we allocate an extra entry in the array - keys = new string[nKeys + 1]; for (int i = 0; i < nKeys; i++) { if (txnMode) @@ -254,7 +286,7 @@ public object Run(int count, SessionParseState parseState) if (!respServerSession.storageSession.objectStoreLockableContext.IsNull) txnKeyEntries.AddKey(key, true, Tsavorite.core.LockType.Exclusive); } - keys[i + 1] = parseState.GetString(offset++); + keyTable[i + 1] = parseState.GetString(offset++); } count -= nKeys; @@ -265,34 +297,31 @@ public object Run(int count, SessionParseState parseState) //} } - string[] argv = null; if (count > 0) { - // Lua uses 1-based indexing, so we allocate an extra entry in the array - argv = new string[count + 1]; for (int i = 0; i < count; i++) { - argv[i + 1] = parseState.GetString(offset++); + argvTable[i + 1] = parseState.GetString(offset++); } } if (txnMode && nKeys > 0) { - return RunTransactionInternal(keys, argv); + return RunTransaction(); } else { - return RunInternal(keys, argv); + return Run(); } } /// /// Runs the precompiled Lua function with specified (keys, argv) state /// - public object Run(string[] keys, string[] argv) + public object Run(string[] keys = null, string[] argv = null) { scratchBufferManager?.Reset(); - + LoadParameters(keys, argv); if (txnMode && keys?.Length > 0) { // Add keys to the transaction @@ -303,15 +332,15 @@ public object Run(string[] keys, string[] argv) if (!respServerSession.storageSession.objectStoreLockableContext.IsNull) txnKeyEntries.AddKey(_key, true, Tsavorite.core.LockType.Exclusive); } - return RunTransactionInternal(keys, argv); + return RunTransaction(); } else { - return RunInternal(keys, argv); + return Run(); } } - object 
RunTransactionInternal(string[] keys, string[] argv) + object RunTransaction() { try { @@ -320,7 +349,7 @@ object RunTransactionInternal(string[] keys, string[] argv) respServerSession.storageSession.objectStoreLockableContext.BeginLockable(); respServerSession.SetTransactionMode(true); txnKeyEntries.LockAllKeys(); - return RunInternal(keys, argv); + return Run(); } finally { @@ -332,64 +361,56 @@ object RunTransactionInternal(string[] keys, string[] argv) } } - object RunInternal(string[] keys, string[] argv) + void ResetParameters(int nKeys, int nArgs) { - if (keys != this.keys) + if (keyLength > nKeys) { - if (keys == null) - { - this.keys = null; - sandbox_env["KEYS"] = this.keys; - } - else - { - if (this.keys != null && keys.Length == this.keys.Length) - Array.Copy(keys, this.keys, keys.Length); - else - { - this.keys = keys; - sandbox_env["KEYS"] = this.keys; - } - } + _ = state.DoString($"count = #KEYS for i={nKeys + 1}, {keyLength} do KEYS[i]=nil end"); } - if (argv != this.argv) + keyLength = nKeys; + if (argvLength > nArgs) { - if (argv == null) - { - this.argv = null; - sandbox_env["ARGV"] = this.argv; - } - else - { - if (this.argv != null && argv.Length == this.argv.Length) - Array.Copy(argv, this.argv, argv.Length); - else - { - this.argv = argv; - sandbox_env["ARGV"] = this.argv; - } - } + _ = state.DoString($"count = #ARGV for i={nArgs + 1}, {argvLength} do ARGV[i]=nil end"); } + argvLength = nArgs; + } - var result = function.Call(); - return result.Length > 0 ? result[0] : null; + void LoadParameters(string[] keys, string[] argv) + { + ResetParameters(keys?.Length ?? 0, argv?.Length ?? 0); + if (keys != null) + { + for (int i = 0; i < keys.Length; i++) + keyTable[i + 1] = keys[i]; + } + if (argv != null) + { + for (int i = 0; i < argv.Length; i++) + argvTable[i + 1] = argv[i]; + } } /// /// Runs the precompiled Lua function /// /// - public object Run() + object Run() { var result = function.Call(); - return result.Length > 0 ? 
result[0] : null; + Cleanup(); + return result?.Length > 0 ? result[0] : null; } - /// - /// Runs the precompiled Lua function - /// - /// - public void RunVoid() - => function.Call(); + void Cleanup() + { + if (disposeQueue != null) + { + while (disposeQueue.Count > 0) + { + var table = disposeQueue.Dequeue(); + table.Dispose(); + } + } + } } } \ No newline at end of file diff --git a/libs/server/Lua/ScratchBufferNetworkSender.cs b/libs/server/Lua/ScratchBufferNetworkSender.cs index 38a72550e0..8cdf7730a8 100644 --- a/libs/server/Lua/ScratchBufferNetworkSender.cs +++ b/libs/server/Lua/ScratchBufferNetworkSender.cs @@ -42,6 +42,8 @@ public void Reset() public string RemoteEndpointName => ""; + public string LocalEndpointName => ""; + /// /// Cleanup this DummyNetworkSender instance /// @@ -118,5 +120,11 @@ public void SendResponse(byte[] buffer, int offset, int count, object context) public void Throttle() { } + + /// + public bool TryClose() + { + return false; + } } } \ No newline at end of file diff --git a/libs/server/Lua/SessionScriptCache.cs b/libs/server/Lua/SessionScriptCache.cs index 05633aa77b..8d5ab8ff33 100644 --- a/libs/server/Lua/SessionScriptCache.cs +++ b/libs/server/Lua/SessionScriptCache.cs @@ -29,7 +29,7 @@ public SessionScriptCache(StoreWrapper storeWrapper, IGarnetAuthenticator authen { this.scratchBufferNetworkSender = new ScratchBufferNetworkSender(); this.storeWrapper = storeWrapper; - this.processor = new RespServerSession(scratchBufferNetworkSender, storeWrapper, null, null, authenticator, false); + this.processor = new RespServerSession(0, scratchBufferNetworkSender, storeWrapper, null, authenticator, false); this.logger = logger; } diff --git a/libs/server/Metrics/Info/GarnetInfoMetrics.cs b/libs/server/Metrics/Info/GarnetInfoMetrics.cs index 7254bf89ba..9ec1758cc5 100644 --- a/libs/server/Metrics/Info/GarnetInfoMetrics.cs +++ b/libs/server/Metrics/Info/GarnetInfoMetrics.cs @@ -37,6 +37,7 @@ class GarnetInfoMetrics MetricsItem[] 
persistenceInfo = null; MetricsItem[] clientsInfo = null; MetricsItem[] keyspaceInfo = null; + MetricsItem[] bufferPoolStats = null; public GarnetInfoMetrics() { } @@ -68,19 +69,28 @@ private void PopulateMemoryInfo(StoreWrapper storeWrapper) var total_main_store_size = main_store_index_size + main_store_log_memory_size + main_store_read_cache_size; var object_store_index_size = -1L; - var object_store_log_memory_references_size = -1L; - var object_store_read_cache_size = -1L; + var object_store_log_memory_size = -1L; + var object_store_read_cache_log_memory_size = -1L; + var object_store_heap_memory_size = -1L; + var object_store_read_cache_heap_memory_size = -1L; var total_object_store_size = -1L; var disableObj = storeWrapper.serverOptions.DisableObjects; + var aof_log_memory_size = storeWrapper.appendOnlyFile?.MemorySizeBytes ?? -1; + if (!disableObj) { object_store_index_size = storeWrapper.objectStore.IndexSize * 64; - object_store_log_memory_references_size = storeWrapper.objectStore.Log.MemorySizeBytes; - object_store_read_cache_size = (storeWrapper.objectStore.ReadCache != null ? storeWrapper.objectStore.ReadCache.MemorySizeBytes : 0); - total_object_store_size = object_store_index_size + object_store_log_memory_references_size + object_store_read_cache_size; + object_store_log_memory_size = storeWrapper.objectStore.Log.MemorySizeBytes; + object_store_read_cache_log_memory_size = storeWrapper.objectStore.ReadCache?.MemorySizeBytes ?? 0; + object_store_heap_memory_size = storeWrapper.objectStoreSizeTracker?.mainLogTracker.LogHeapSizeBytes ?? 0; + object_store_read_cache_heap_memory_size = storeWrapper.objectStoreSizeTracker?.readCacheTracker?.LogHeapSizeBytes ?? 
0; + total_object_store_size = object_store_index_size + object_store_log_memory_size + object_store_read_cache_log_memory_size + object_store_heap_memory_size + object_store_read_cache_heap_memory_size; } + var gcMemoryInfo = GC.GetGCMemoryInfo(); + var gcAvailableMemory = gcMemoryInfo.TotalCommittedBytes - gcMemoryInfo.HeapSizeBytes; + memoryInfo = [ new("system_page_size", Environment.SystemPageSize.ToString()), @@ -104,14 +114,21 @@ private void PopulateMemoryInfo(StoreWrapper storeWrapper) new("proc_physical_memory_size(MB)", SystemMetrics.GetPhysicalMemoryUsage(1 << 20).ToString()), new("proc_peak_physical_memory_size", SystemMetrics.GetPeakPhysicalMemoryUsage().ToString()), new("proc_peak_physical_memory_size(MB)", SystemMetrics.GetPeakPhysicalMemoryUsage(1 << 20).ToString()), + new("gc_committed_bytes", gcMemoryInfo.TotalCommittedBytes.ToString()), + new("gc_heap_bytes", gcMemoryInfo.HeapSizeBytes.ToString()), + new("gc_managed_memory_bytes_excluding_heap", gcAvailableMemory.ToString()), + new("gc_fragmented_bytes", gcMemoryInfo.FragmentedBytes.ToString()), new("main_store_index_size", main_store_index_size.ToString()), new("main_store_log_memory_size", main_store_log_memory_size.ToString()), new("main_store_read_cache_size", main_store_read_cache_size.ToString()), new("total_main_store_size", total_main_store_size.ToString()), new("object_store_index_size", object_store_index_size.ToString()), - new("object_store_log_memory_references_size", object_store_log_memory_references_size.ToString()), - new("object_store_read_cache_size", object_store_read_cache_size.ToString()), - new("total_object_store_size", total_object_store_size.ToString()) + new("object_store_log_memory_size", object_store_log_memory_size.ToString()), + new("object_store_read_cache_log_memory_size", object_store_read_cache_log_memory_size.ToString()), + new("object_store_heap_memory_size", object_store_heap_memory_size.ToString()), + new("object_store_read_cache_heap_memory_size", 
object_store_read_cache_heap_memory_size.ToString()), + new("total_object_store_size", total_object_store_size.ToString()), + new("aof_memory_size", aof_log_memory_size.ToString()) ]; } @@ -224,15 +241,15 @@ private void PopulateObjectStoreStats(StoreWrapper storeWrapper) ]; } - public void PopulateStoreHashDistribution(StoreWrapper storeWrapper) => storeHashDistrInfo = [new("", storeWrapper.store.DumpDistribution())]; + private void PopulateStoreHashDistribution(StoreWrapper storeWrapper) => storeHashDistrInfo = [new("", storeWrapper.store.DumpDistribution())]; - public void PopulateObjectStoreHashDistribution(StoreWrapper storeWrapper) => objectStoreHashDistrInfo = [new("", storeWrapper.objectStore.DumpDistribution())]; + private void PopulateObjectStoreHashDistribution(StoreWrapper storeWrapper) => objectStoreHashDistrInfo = [new("", storeWrapper.objectStore.DumpDistribution())]; - public void PopulateStoreRevivInfo(StoreWrapper storeWrapper) => storeRevivInfo = [new("", storeWrapper.store.DumpRevivificationStats())]; + private void PopulateStoreRevivInfo(StoreWrapper storeWrapper) => storeRevivInfo = [new("", storeWrapper.store.DumpRevivificationStats())]; - public void PopulateObjectStoreRevivInfo(StoreWrapper storeWrapper) => objectStoreRevivInfo = [new("", storeWrapper.objectStore.DumpRevivificationStats())]; + private void PopulateObjectStoreRevivInfo(StoreWrapper storeWrapper) => objectStoreRevivInfo = [new("", storeWrapper.objectStore.DumpRevivificationStats())]; - public void PopulatePersistenceInfo(StoreWrapper storeWrapper) + private void PopulatePersistenceInfo(StoreWrapper storeWrapper) { bool aofEnabled = storeWrapper.serverOptions.EnableAOF; persistenceInfo = @@ -258,6 +275,13 @@ private void PopulateKeyspaceInfo(StoreWrapper storeWrapper) keyspaceInfo = null; } + private void PopulateClusterBufferPoolStats(StoreWrapper storeWrapper) + { + bufferPoolStats = [new("server_socket", storeWrapper.GetTcpServer().GetBufferPoolStats())]; + if 
(storeWrapper.clusterProvider != null) + bufferPoolStats = [.. bufferPoolStats, .. storeWrapper.clusterProvider.GetBufferPoolStats()]; + } + public static string GetSectionHeader(InfoMetricsType infoType) { return infoType switch @@ -277,6 +301,7 @@ public static string GetSectionHeader(InfoMetricsType infoType) InfoMetricsType.CLIENTS => "Clients", InfoMetricsType.KEYSPACE => "Keyspace", InfoMetricsType.MODULES => "Modules", + InfoMetricsType.BPSTATS => "BufferPool Stats", _ => "Default", }; } @@ -353,6 +378,9 @@ public string GetRespInfo(InfoMetricsType section, StoreWrapper storeWrapper) return GetSectionRespInfo(InfoMetricsType.KEYSPACE, keyspaceInfo); case InfoMetricsType.MODULES: return GetSectionRespInfo(section, null); + case InfoMetricsType.BPSTATS: + PopulateClusterBufferPoolStats(storeWrapper); + return GetSectionRespInfo(InfoMetricsType.BPSTATS, bufferPoolStats); default: return ""; } @@ -364,7 +392,9 @@ public string GetRespInfo(InfoMetricsType[] sections, StoreWrapper storeWrapper) for (var i = 0; i < sections.Length; i++) { var section = sections[i]; - response += GetRespInfo(section, storeWrapper); + var resp = GetRespInfo(section, storeWrapper); + if (string.IsNullOrEmpty(resp)) continue; + response += resp; response += sections.Length - 1 == i ? "" : "\r\n"; } return response; diff --git a/libs/server/Metrics/Info/InfoCommand.cs b/libs/server/Metrics/Info/InfoCommand.cs index 0d4f874aae..06aab51d34 100644 --- a/libs/server/Metrics/Info/InfoCommand.cs +++ b/libs/server/Metrics/Info/InfoCommand.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; -using System.Linq; using Garnet.common; namespace Garnet.server @@ -70,11 +69,19 @@ private bool NetworkINFO() } else { - InfoMetricsType[] sectionsArr = sections == null ? GarnetInfoMetrics.defaultInfo : [.. 
sections]; - GarnetInfoMetrics garnetInfo = new(); - string info = garnetInfo.GetRespInfo(sectionsArr, storeWrapper); - while (!RespWriteUtils.WriteAsciiBulkString(info, ref dcurr, dend)) - SendAndReset(); + var sectionsArr = sections == null ? GarnetInfoMetrics.defaultInfo : [.. sections]; + var garnetInfo = new GarnetInfoMetrics(); + var info = garnetInfo.GetRespInfo(sectionsArr, storeWrapper); + if (!string.IsNullOrEmpty(info)) + { + while (!RespWriteUtils.WriteAsciiBulkString(info, ref dcurr, dend)) + SendAndReset(); + } + else + { + while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_ERRNOTFOUND, ref dcurr, dend)) + SendAndReset(); + } } return true; diff --git a/libs/server/Module/ModuleRegistrar.cs b/libs/server/Module/ModuleRegistrar.cs index eff832500e..90dfa9e3de 100644 --- a/libs/server/Module/ModuleRegistrar.cs +++ b/libs/server/Module/ModuleRegistrar.cs @@ -8,7 +8,7 @@ using System.Reflection; using Microsoft.Extensions.Logging; -namespace Garnet.server.Module +namespace Garnet.server { /// /// Abstract base class that all Garnet modules must inherit from. 
@@ -93,14 +93,15 @@ public ModuleActionStatus Initialize(string name, uint version) /// Command type /// Custom raw string function implementation /// Command info + /// RESP command docs /// Expiration ticks for the key /// Registration status - public ModuleActionStatus RegisterCommand(string name, CustomRawStringFunctions customFunctions, CommandType type = CommandType.ReadModifyWrite, RespCommandsInfo commandInfo = null, long expirationTicks = 0) + public ModuleActionStatus RegisterCommand(string name, CustomRawStringFunctions customFunctions, CommandType type = CommandType.ReadModifyWrite, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null, long expirationTicks = 0) { if (string.IsNullOrEmpty(name) || customFunctions == null) return ModuleActionStatus.InvalidRegistrationInfo; - customCommandManager.Register(name, type, customFunctions, commandInfo, expirationTicks); + customCommandManager.Register(name, type, customFunctions, commandInfo, commandDocs, expirationTicks); return ModuleActionStatus.Success; } @@ -109,15 +110,16 @@ public ModuleActionStatus RegisterCommand(string name, CustomRawStringFunctions /// Registers a custom transaction /// /// Transaction name - /// Transaction procedure implemenation + /// Transaction procedure implementation /// Command info + /// Command docs /// Registration status - public ModuleActionStatus RegisterTransaction(string name, Func proc, RespCommandsInfo commandInfo = null) + public ModuleActionStatus RegisterTransaction(string name, Func proc, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) { if (string.IsNullOrEmpty(name) || proc == null) return ModuleActionStatus.InvalidRegistrationInfo; - customCommandManager.Register(name, proc, commandInfo); + customCommandManager.Register(name, proc, commandInfo, commandDocs); return ModuleActionStatus.Success; } @@ -143,13 +145,14 @@ public ModuleActionStatus RegisterType(CustomObjectFactory factory) /// Custom object function 
implementation /// Command type /// Command info + /// RESP command docs /// - public ModuleActionStatus RegisterCommand(string name, CustomObjectFactory factory, CustomObjectFunctions command, CommandType type = CommandType.ReadModifyWrite, RespCommandsInfo commandInfo = null) + public ModuleActionStatus RegisterCommand(string name, CustomObjectFactory factory, CustomObjectFunctions command, CommandType type = CommandType.ReadModifyWrite, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) { if (string.IsNullOrEmpty(name) || factory == null || command == null) return ModuleActionStatus.InvalidRegistrationInfo; - customCommandManager.Register(name, type, factory, command, commandInfo); + customCommandManager.Register(name, type, factory, command, commandInfo, commandDocs); return ModuleActionStatus.Success; } @@ -160,22 +163,23 @@ public ModuleActionStatus RegisterCommand(string name, CustomObjectFactory facto /// Procedure name /// Custom procedure implementation /// Command info + /// Command docs /// Registration status - public ModuleActionStatus RegisterProcedure(string name, CustomProcedure customScriptProc, RespCommandsInfo commandInfo = null) + public ModuleActionStatus RegisterProcedure(string name, CustomProcedure customScriptProc, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) { if (string.IsNullOrEmpty(name) || customScriptProc == null) return ModuleActionStatus.InvalidRegistrationInfo; - customCommandManager.Register(name, customScriptProc, commandInfo); + customCommandManager.Register(name, customScriptProc, commandInfo, commandDocs); return ModuleActionStatus.Success; } } - internal sealed class ModuleRegistrar + public sealed class ModuleRegistrar { private static readonly Lazy lazy = new Lazy(() => new ModuleRegistrar()); - internal static ModuleRegistrar Instance { get { return lazy.Value; } } + public static ModuleRegistrar Instance { get { return lazy.Value; } } private ModuleRegistrar() { @@ -184,7 
+188,7 @@ private ModuleRegistrar() private readonly ConcurrentDictionary modules; - internal bool LoadModule(CustomCommandManager customCommandManager, Assembly loadedAssembly, string[] moduleArgs, ILogger logger, out ReadOnlySpan errorMessage) + public bool LoadModule(CustomCommandManager customCommandManager, Assembly loadedAssembly, string[] moduleArgs, ILogger logger, out ReadOnlySpan errorMessage) { errorMessage = default; diff --git a/libs/server/Module/ModuleUtils.cs b/libs/server/Module/ModuleUtils.cs new file mode 100644 index 0000000000..2325ea89ac --- /dev/null +++ b/libs/server/Module/ModuleUtils.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Reflection.Metadata; +using System.Reflection.PortableExecutable; +using Garnet.common; + +namespace Garnet.server +{ + public class ModuleUtils + { + public static bool LoadAssemblies( + IEnumerable binaryPaths, + string[] allowedExtensionPaths, + bool allowUnsignedAssemblies, + out IEnumerable loadedAssemblies, + out ReadOnlySpan errorMessage) + { + loadedAssemblies = null; + errorMessage = default; + + // Get all binary file paths from inputs binary paths + if (!FileUtils.TryGetFiles(binaryPaths, out var files, out _, [".dll", ".exe"], SearchOption.AllDirectories)) + { + errorMessage = CmdStrings.RESP_ERR_GENERIC_GETTING_BINARY_FILES; + return false; + } + + // Check that all binary files are contained in allowed binary paths + var binaryFiles = files.ToArray(); + if (allowedExtensionPaths != null) + { + if (binaryFiles.Any(f => + allowedExtensionPaths.All(p => !FileUtils.IsFileInDirectory(f, p)))) + { + errorMessage = CmdStrings.RESP_ERR_GENERIC_BINARY_FILES_NOT_IN_ALLOWED_PATHS; + return false; + } + } + + // If necessary, check that all assemblies are digitally signed + if (!allowUnsignedAssemblies) + { + foreach (var filePath in 
files) + { + using var fs = File.OpenRead(filePath); + using var peReader = new PEReader(fs); + + var metadataReader = peReader.GetMetadataReader(); + var assemblyPublicKeyHandle = metadataReader.GetAssemblyDefinition().PublicKey; + + if (assemblyPublicKeyHandle.IsNil) + { + errorMessage = CmdStrings.RESP_ERR_GENERIC_ASSEMBLY_NOT_SIGNED; + return false; + } + + var publicKeyBytes = metadataReader.GetBlobBytes(assemblyPublicKeyHandle); + if (publicKeyBytes == null || publicKeyBytes.Length == 0) + { + errorMessage = CmdStrings.RESP_ERR_GENERIC_ASSEMBLY_NOT_SIGNED; + return false; + } + } + } + + // Get all assemblies from binary files + if (!FileUtils.TryLoadAssemblies(binaryFiles, out loadedAssemblies, out _)) + { + errorMessage = CmdStrings.RESP_ERR_GENERIC_LOADING_ASSEMBLIES; + return false; + } + + return true; + } + } +} \ No newline at end of file diff --git a/libs/server/Objects/List/ListObject.cs b/libs/server/Objects/List/ListObject.cs index bf6db1f3aa..24d717c4ad 100644 --- a/libs/server/Objects/List/ListObject.cs +++ b/libs/server/Objects/List/ListObject.cs @@ -34,6 +34,7 @@ public enum ListOperation : byte LSET, BRPOP, BLPOP, + LPOS, } /// @@ -179,6 +180,9 @@ public override unsafe bool Operate(ref ObjectInput input, ref SpanByteAndMemory case ListOperation.LSET: ListSet(ref input, ref output); break; + case ListOperation.LPOS: + ListPosition(ref input, ref output); + break; default: throw new GarnetException($"Unsupported operation {input.header.ListOp} in ListObject.Operate"); diff --git a/libs/server/Objects/List/ListObjectImpl.cs b/libs/server/Objects/List/ListObjectImpl.cs index fd0cbf2c20..4ee1ad3c24 100644 --- a/libs/server/Objects/List/ListObjectImpl.cs +++ b/libs/server/Objects/List/ListObjectImpl.cs @@ -4,6 +4,7 @@ using System; using System.Buffers; using System.Collections.Generic; +using System.Diagnostics; using System.Linq; using Garnet.common; using Tsavorite.core; @@ -418,5 +419,216 @@ private void ListSet(ref ObjectInput input, ref 
SpanByteAndMemory output) output.Length = (int)(output_currptr - output_startptr); } } + + private void ListPosition(ref ObjectInput input, ref SpanByteAndMemory output) + { + var element = input.parseState.GetArgSliceByRef(input.parseStateStartIdx).ReadOnlySpan; + input.parseStateStartIdx++; + + var isMemory = false; + MemoryHandle ptrHandle = default; + var output_startptr = output.SpanByte.ToPointer(); + var output_currptr = output_startptr; + var output_end = output_currptr + output.Length; + var count = 0; + var isDefaultCount = true; + ObjectOutputHeader outputHeader = default; + + try + { + if (!ReadListPositionInput(ref input, out var rank, out count, out isDefaultCount, out var maxlen, out var error)) + { + while (!RespWriteUtils.WriteError(error, ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + return; + } + + if (count < 0) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER, ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + return; + } + + if (maxlen < 0) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER, ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + return; + } + + if (rank == 0) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER, ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + return; + } + + count = count == 0 ? 
list.Count : count; + var totalArrayHeaderLen = 0; + var lastFoundItemIndex = -1; + + if (!isDefaultCount) + { + while (!RespWriteUtils.WriteArrayLength(count, ref output_currptr, output_end, out var _, out totalArrayHeaderLen)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + } + + var noOfFoundItem = 0; + if (rank > 0) + { + var currentNode = list.First; + var currentIndex = 0; + var maxlenIndex = maxlen == 0 ? list.Count : maxlen; + do + { + var nextNode = currentNode.Next; + if (currentNode.Value.AsSpan().SequenceEqual(element)) + { + if (rank == 1) + { + lastFoundItemIndex = currentIndex; + while (!RespWriteUtils.WriteInteger(currentIndex, ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + + noOfFoundItem++; + if (noOfFoundItem == count) + { + break; + } + } + else + { + rank--; + } + } + currentNode = nextNode; + currentIndex++; + } + while (currentNode != null && currentIndex < maxlenIndex); + } + else // (rank < 0) + { + var currentNode = list.Last; + var currentIndex = list.Count - 1; + var maxlenIndex = maxlen == 0 ? 
0 : list.Count - maxlen; + do + { + var nextNode = currentNode.Previous; + if (currentNode.Value.AsSpan().SequenceEqual(element)) + { + if (rank == -1) + { + lastFoundItemIndex = currentIndex; + while (!RespWriteUtils.WriteInteger(currentIndex, ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + + noOfFoundItem++; + if (noOfFoundItem == count) + { + break; + } + } + else + { + rank++; + } + } + currentNode = nextNode; + currentIndex--; + } + while (currentNode != null && currentIndex >= maxlenIndex); + } + + if (isDefaultCount && noOfFoundItem == 0) + { + output_currptr = output_startptr; + while (!RespWriteUtils.WriteNull(ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + } + else if (!isDefaultCount && noOfFoundItem == 0) + { + output_currptr = output_startptr; + while (!RespWriteUtils.WriteNullArray(ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + } + else if (!isDefaultCount && noOfFoundItem != count) + { + var newTotalArrayHeaderLen = 0; + var startOutputStartptr = output_startptr; + RespWriteUtils.WriteArrayLength(noOfFoundItem, ref startOutputStartptr, output_end, out var _, out newTotalArrayHeaderLen); // ReallocateOutput is not needed here as there should be always be available space in the output buffer as we have already written the max array length + Debug.Assert(totalArrayHeaderLen >= newTotalArrayHeaderLen, "newTotalArrayHeaderLen can't be bigger than totalArrayHeaderLen as we have already written max array lenght in the buffer"); + + if (totalArrayHeaderLen != newTotalArrayHeaderLen) + { + var remainingLength = (output_currptr - output_startptr) - totalArrayHeaderLen; + Buffer.MemoryCopy(output_startptr + 
totalArrayHeaderLen, output_startptr + newTotalArrayHeaderLen, remainingLength, remainingLength); + output_currptr = output_currptr - (totalArrayHeaderLen - newTotalArrayHeaderLen); + } + } + + outputHeader.result1 = noOfFoundItem; + } + finally + { + while (!RespWriteUtils.WriteDirect(ref outputHeader, ref output_currptr, output_end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref output_startptr, ref ptrHandle, ref output_currptr, ref output_end); + + if (isMemory) + ptrHandle.Dispose(); + output.Length = (int)(output_currptr - output_startptr); + } + } + + private static unsafe bool ReadListPositionInput(ref ObjectInput input, out int rank, out int count, out bool isDefaultCount, out int maxlen, out ReadOnlySpan error) + { + var currTokenIdx = input.parseStateStartIdx; + + rank = 1; // By default, LPOS takes first match element + count = 1; // By default, LPOS return 1 element + isDefaultCount = true; + maxlen = 0; // By default, iterate to all the item + + error = default; + + while (currTokenIdx < input.parseState.Count) + { + var sbParam = input.parseState.GetArgSliceByRef(currTokenIdx++).ReadOnlySpan; + + if (sbParam.SequenceEqual(CmdStrings.RANK) || sbParam.SequenceEqual(CmdStrings.rank)) + { + if (!input.parseState.TryGetInt(currTokenIdx++, out rank)) + { + error = CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER; + return false; + } + } + else if (sbParam.SequenceEqual(CmdStrings.COUNT) || sbParam.SequenceEqual(CmdStrings.count)) + { + if (!input.parseState.TryGetInt(currTokenIdx++, out count)) + { + error = CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER; + return false; + } + + isDefaultCount = false; + } + else if (sbParam.SequenceEqual(CmdStrings.MAXLEN) || sbParam.SequenceEqual(CmdStrings.maxlen)) + { + if (!input.parseState.TryGetInt(currTokenIdx++, out maxlen)) + { + error = CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER; + return false; + } + } + else + { + error = CmdStrings.RESP_SYNTAX_ERROR; + return false; + } + } + + return 
true; + } } } \ No newline at end of file diff --git a/libs/server/Objects/SortedSet/SortedSetObject.cs b/libs/server/Objects/SortedSet/SortedSetObject.cs index a04bf7f201..0da710bb2f 100644 --- a/libs/server/Objects/SortedSet/SortedSetObject.cs +++ b/libs/server/Objects/SortedSet/SortedSetObject.cs @@ -47,6 +47,37 @@ public enum SortedSetOperation : byte ZMSCORE } + [Flags] + public enum SortedSetAddOption + { + None = 0, + /// + /// Only update elements that already exist. Don't add new elements. + /// + XX = 1, + /// + /// Only add new elements. Don't update already existing elements. + /// + NX = 1 << 1, + /// + /// Only update existing elements if the new score is less than the current score. + /// + LT = 1 << 2, + /// + /// Only update existing elements if the new score is greater than the current score. + /// + GT = 1 << 3, + /// + /// Modify the return value from the number of new elements added, to the total number of elements changed. + /// Changed elements are new elements added and elements already existing for which the score was updated. + /// + CH = 1 << 4, + /// + /// When this option is specified ZADD acts like ZINCRBY. Only one score-element pair can be specified in this mode. 
+ /// + INCR = 1 << 5, + } + /// /// Order variations for sorted set commands /// diff --git a/libs/server/Objects/SortedSet/SortedSetObjectImpl.cs b/libs/server/Objects/SortedSet/SortedSetObjectImpl.cs index 8abbde4662..eddab06a5c 100644 --- a/libs/server/Objects/SortedSet/SortedSetObjectImpl.cs +++ b/libs/server/Objects/SortedSet/SortedSetObjectImpl.cs @@ -30,6 +30,91 @@ private struct ZRangeOptions public bool WithScores { get; set; } }; + bool TryGetSortedSetAddOption(ReadOnlySpan item, out SortedSetAddOption options) + { + if (item.EqualsUpperCaseSpanIgnoringCase("XX"u8)) + { + options = SortedSetAddOption.XX; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("NX"u8)) + { + options = SortedSetAddOption.NX; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("LT"u8)) + { + options = SortedSetAddOption.LT; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("GT"u8)) + { + options = SortedSetAddOption.GT; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("CH"u8)) + { + options = SortedSetAddOption.CH; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("INCR"u8)) + { + options = SortedSetAddOption.INCR; + return true; + } + options = SortedSetAddOption.None; + return false; + } + + bool GetOptions(ref ObjectInput input, ref int currTokenIdx, out SortedSetAddOption options, ref byte* curr, byte* end, ref SpanByteAndMemory output, ref bool isMemory, ref byte* ptr, ref MemoryHandle ptrHandle) + { + options = SortedSetAddOption.None; + + while (currTokenIdx < input.parseState.Count) + { + if (!TryGetSortedSetAddOption(input.parseState.GetArgSliceByRef(currTokenIdx).ReadOnlySpan, out var currOption)) + break; + + options |= currOption; + currTokenIdx++; + } + + // Validate ZADD options combination + ReadOnlySpan optionsError = default; + + // XX & NX are mutually exclusive + if (options.HasFlag(SortedSetAddOption.XX) && options.HasFlag(SortedSetAddOption.NX)) + optionsError = 
CmdStrings.RESP_ERR_XX_NX_NOT_COMPATIBLE; + + // NX, GT & LT are mutually exclusive + if ((options.HasFlag(SortedSetAddOption.GT) && options.HasFlag(SortedSetAddOption.LT)) || + ((options.HasFlag(SortedSetAddOption.GT) || options.HasFlag(SortedSetAddOption.LT)) && + options.HasFlag(SortedSetAddOption.NX))) + optionsError = CmdStrings.RESP_ERR_GT_LT_NX_NOT_COMPATIBLE; + + // INCR supports only one score-element pair + if (options.HasFlag(SortedSetAddOption.INCR) && (input.parseState.Count - currTokenIdx > 2)) + optionsError = CmdStrings.RESP_ERR_INCR_SUPPORTS_ONLY_SINGLE_PAIR; + + if (!optionsError.IsEmpty) + { + while (!RespWriteUtils.WriteError(optionsError, ref curr, end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref ptr, ref ptrHandle, ref curr, ref end); + return false; + } + + // From here on we expect only score-element pairs + // Remaining token count should be positive and even + if (currTokenIdx == input.parseState.Count || (input.parseState.Count - currTokenIdx) % 2 != 0) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_SYNTAX_ERROR, ref curr, end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref ptr, ref ptrHandle, ref curr, ref end); + return false; + } + + return true; + } + private void SortedSetAdd(ref ObjectInput input, ref SpanByteAndMemory output) { var isMemory = false; @@ -40,43 +125,98 @@ private void SortedSetAdd(ref ObjectInput input, ref SpanByteAndMemory output) var end = curr + output.Length; ObjectOutputHeader outputHeader = default; - var added = 0; + var addedOrChanged = 0; + double incrResult = 0; + try { - for (var currIdx = input.parseStateStartIdx; currIdx < input.parseState.Count; currIdx += 2) + var options = SortedSetAddOption.None; + var currTokenIdx = input.parseStateStartIdx; + var parsedOptions = false; + + while (currTokenIdx < input.parseState.Count) { - if (!input.parseState.TryGetDouble(currIdx, out var score)) + // Try to parse a Score field + if 
(!input.parseState.TryGetDouble(currTokenIdx, out var score)) { - while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_NOT_VALID_FLOAT, ref curr, end)) - ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref ptr, ref ptrHandle, ref curr, ref end); - return; + // Try to get and validate options before the Score field, if any + if (!parsedOptions) + { + parsedOptions = true; + if (!GetOptions(ref input, ref currTokenIdx, out options, ref curr, end, ref output, ref isMemory, ref ptr, ref ptrHandle)) + return; + continue; // retry after parsing options + } + else + { + // Invalid Score encountered + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_NOT_VALID_FLOAT, ref curr, end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref ptr, ref ptrHandle, ref curr, ref end); + return; + } } - var memberSpan = input.parseState.GetArgSliceByRef(currIdx + 1).ReadOnlySpan; + parsedOptions = true; + currTokenIdx++; + + // Member + var memberSpan = input.parseState.GetArgSliceByRef(currTokenIdx++).ReadOnlySpan; + var member = memberSpan.ToArray(); - var memberArray = memberSpan.ToArray(); - if (!sortedSetDict.TryGetValue(memberArray, out var scoreStored)) + // Add new member + if (!sortedSetDict.TryGetValue(member, out var scoreStored)) { - sortedSetDict.Add(memberArray, score); - if (sortedSet.Add((score, memberArray))) - { - added++; - } + // Don't add new member if XX flag is set + if (options.HasFlag(SortedSetAddOption.XX)) continue; + + sortedSetDict.Add(member, score); + if (sortedSet.Add((score, member))) + addedOrChanged++; this.UpdateSize(memberSpan); } - else if (scoreStored != score) + // Update existing member + else { - sortedSetDict[memberArray] = score; - var success = sortedSet.Remove((scoreStored, memberArray)); + // Update new score if INCR flag is set + if (options.HasFlag(SortedSetAddOption.INCR)) + { + score += scoreStored; + incrResult = score; + } + + // No need for update + if (score == scoreStored) + continue; + + // Don't update 
existing member if NX flag is set + // or if GT/LT flag is set and existing score is higher/lower than new score, respectively + if (options.HasFlag(SortedSetAddOption.NX) || + (options.HasFlag(SortedSetAddOption.GT) && scoreStored > score) || + (options.HasFlag(SortedSetAddOption.LT) && scoreStored < score)) continue; + + sortedSetDict[member] = score; + var success = sortedSet.Remove((scoreStored, member)); Debug.Assert(success); - success = sortedSet.Add((score, memberArray)); + success = sortedSet.Add((score, member)); Debug.Assert(success); + + // If CH flag is set, add changed member to final count + if (options.HasFlag(SortedSetAddOption.CH)) + addedOrChanged++; } } - while (!RespWriteUtils.WriteInteger(added, ref curr, end)) - ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref ptr, ref ptrHandle, ref curr, ref end); + if (options.HasFlag(SortedSetAddOption.INCR)) + { + while (!RespWriteUtils.TryWriteDoubleBulkString(incrResult, ref curr, end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref ptr, ref ptrHandle, ref curr, ref end); + } + else + { + while (!RespWriteUtils.WriteInteger(addedOrChanged, ref curr, end)) + ObjectUtils.ReallocateOutput(ref output, ref isMemory, ref ptr, ref ptrHandle, ref curr, ref end); + } } finally { diff --git a/libs/server/Objects/Types/GarnetObjectType.cs b/libs/server/Objects/Types/GarnetObjectType.cs index a5d7f3e596..0657ab059d 100644 --- a/libs/server/Objects/Types/GarnetObjectType.cs +++ b/libs/server/Objects/Types/GarnetObjectType.cs @@ -29,6 +29,16 @@ public enum GarnetObjectType : byte /// Set, + /// + /// Special type indicating EXPIRETIME command + /// + Expiretime = 0xf9, + + /// + /// Special type indicating PEXPIRETIME command + /// + PExpiretime = 0xfa, + /// /// Special type indicating PERSIST command /// diff --git a/libs/server/Providers/GarnetProvider.cs b/libs/server/Providers/GarnetProvider.cs index c474e4cdd2..10665c94c1 100644 --- a/libs/server/Providers/GarnetProvider.cs +++ 
b/libs/server/Providers/GarnetProvider.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +using System.Threading; using Garnet.common; using Garnet.networking; using Tsavorite.core; @@ -18,13 +19,13 @@ public sealed class GarnetProvider : TsavoriteKVProviderBase /// StoreWrapper /// internal StoreWrapper StoreWrapper => storeWrapper; - internal CollectionItemBroker itemBroker; - /// /// Create SpanByte TsavoriteKV backend for Garnet /// @@ -33,12 +34,10 @@ public sealed class GarnetProvider : TsavoriteKVProviderBase public GarnetProvider(StoreWrapper storeWrapper, SubscribeBroker> broker = null, - CollectionItemBroker itemBroker = null, MaxSizeSettings maxSizeSettings = default) : base(storeWrapper.store, new(), broker, false, maxSizeSettings) { this.storeWrapper = storeWrapper; - this.itemBroker = itemBroker; } /// @@ -67,7 +66,7 @@ public void Dispose() /// public override IMessageConsumer GetSession(WireFormat wireFormat, INetworkSender networkSender) => (wireFormat == WireFormat.ASCII) - ? new RespServerSession(networkSender, storeWrapper, broker, itemBroker, null, true) + ? 
new RespServerSession(Interlocked.Increment(ref lastSessionId), networkSender, storeWrapper, broker, null, true) : throw new GarnetException($"Unsupported wireFormat {wireFormat}"); } } \ No newline at end of file diff --git a/libs/server/PubSub/SubscribeBroker.cs b/libs/server/PubSub/SubscribeBroker.cs index 707fbf92e8..a3985a5eb5 100644 --- a/libs/server/PubSub/SubscribeBroker.cs +++ b/libs/server/PubSub/SubscribeBroker.cs @@ -39,13 +39,14 @@ public sealed class SubscribeBroker : IDispos /// Serializer for Prefix Match and serializing Key /// Directory where the log will be stored /// Page size of log used for pub/sub + /// Subscriber log refresh frequency /// start the log from scratch, do not continue - public SubscribeBroker(IKeySerializer keySerializer, string logDir, long pageSize, bool startFresh = true) + public SubscribeBroker(IKeySerializer keySerializer, string logDir, long pageSize, int subscriberRefreshFrequencyMs, bool startFresh = true) { this.keySerializer = keySerializer; device = logDir == null ? 
new NullDevice() : Devices.CreateLogDevice(logDir + "/pubsubkv", preallocateFile: false); device.Initialize((long)(1 << 30) * 64); - log = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSize = pageSize, MemorySize = pageSize * 4, AutoRefreshSafeTailAddress = true }); + log = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSize = pageSize, MemorySize = pageSize * 4, SafeTailRefreshFrequencyMs = subscriberRefreshFrequencyMs }); if (startFresh) log.TruncateUntil(log.CommittedUntilAddress); } @@ -135,13 +136,14 @@ private async Task Start(CancellationToken cancellationToken = default) var uniqueKeys = new Dictionary(ByteArrayComparer.Instance); long truncateUntilAddress = log.BeginAddress; - while (true) - { - if (disposed) - break; + using var iter = log.ScanSingle(log.BeginAddress, long.MaxValue, scanUncommitted: true); + var signal = iter.Signal; + using var registration = cts.Token.Register(signal); - using var iter = log.Scan(log.BeginAddress, long.MaxValue, scanUncommitted: true); + while (!disposed) + { await iter.WaitAsync(cancellationToken).ConfigureAwait(false); + if (cancellationToken.IsCancellationRequested) break; while (iter.GetNext(out byte[] subscriptionKeyValueAscii, out _, out long currentAddress, out long nextAddress)) { if (currentAddress >= long.MaxValue) return; diff --git a/libs/server/Resp/ACLCommands.cs b/libs/server/Resp/ACLCommands.cs index 3fdd0d9410..61c4f6e8c5 100644 --- a/libs/server/Resp/ACLCommands.cs +++ b/libs/server/Resp/ACLCommands.cs @@ -3,7 +3,6 @@ using System; using System.Diagnostics; -using System.Text; using Garnet.common; using Garnet.server.ACL; using Garnet.server.Auth; @@ -17,6 +16,37 @@ namespace Garnet.server /// internal sealed unsafe partial class RespServerSession : ServerSessionBase { + private bool ValidateACLAuthenticator() + { + if (_authenticator is null or not GarnetACLAuthenticator) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_ACL_AUTH_DISABLED, ref 
dcurr, dend)) + SendAndReset(); + return false; + } + return true; + } + + private bool ValidateACLFileUse() + { + if (storeWrapper.serverOptions.AuthSettings is not AclAuthenticationSettings) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_ACL_AUTH_DISABLED, ref dcurr, dend)) + SendAndReset(); + return false; + } + + var aclAuthenticationSettings = (AclAuthenticationSettings)storeWrapper.serverOptions.AuthSettings; + if (aclAuthenticationSettings.AclConfigurationFile == null) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_ACL_AUTH_FILE_DISABLED, ref dcurr, dend)) + SendAndReset(); + return false; + } + + return true; + } + /// /// Processes ACL LIST subcommand. /// @@ -31,8 +61,10 @@ private bool NetworkAclList() } else { - GarnetACLAuthenticator aclAuthenticator = (GarnetACLAuthenticator)_authenticator; + if (!ValidateACLAuthenticator()) + return true; + var aclAuthenticator = (GarnetACLAuthenticator)_authenticator; var users = aclAuthenticator.GetAccessControlList().GetUsers(); while (!RespWriteUtils.WriteArrayLength(users.Count, ref dcurr, dend)) SendAndReset(); @@ -61,8 +93,10 @@ private bool NetworkAclUsers() } else { - GarnetACLAuthenticator aclAuthenticator = (GarnetACLAuthenticator)_authenticator; + if (!ValidateACLAuthenticator()) + return true; + var aclAuthenticator = (GarnetACLAuthenticator)_authenticator; var users = aclAuthenticator.GetAccessControlList().GetUsers(); while (!RespWriteUtils.WriteArrayLength(users.Count, ref dcurr, dend)) SendAndReset(); @@ -91,6 +125,9 @@ private bool NetworkAclCat() } else { + if (!ValidateACLAuthenticator()) + return true; + var categories = ACLParser.ListCategories(); RespWriteUtils.WriteArrayLength(categories.Count, ref dcurr, dend); @@ -118,6 +155,9 @@ private bool NetworkAclSetUser() } else { + if (!ValidateACLAuthenticator()) + return true; + var aclAuthenticator = (GarnetACLAuthenticator)_authenticator; // REQUIRED: username @@ -173,8 +213,10 @@ private bool NetworkAclDelUser() } else 
{ - var aclAuthenticator = (GarnetACLAuthenticator)_authenticator; + if (!ValidateACLAuthenticator()) + return true; + var aclAuthenticator = (GarnetACLAuthenticator)_authenticator; var successfulDeletes = 0; try @@ -223,7 +265,10 @@ private bool NetworkAclWhoAmI() } else { - GarnetACLAuthenticator aclAuthenticator = (GarnetACLAuthenticator)_authenticator; + if (!ValidateACLAuthenticator()) + return true; + + var aclAuthenticator = (GarnetACLAuthenticator)_authenticator; // Return the name of the currently authenticated user. Debug.Assert(aclAuthenticator.GetUser() != null); @@ -249,6 +294,12 @@ private bool NetworkAclLoad() } else { + if (!ValidateACLAuthenticator()) + return true; + + if (!ValidateACLFileUse()) + return true; + // NOTE: This is temporary as long as ACL operations are only supported when using the ACL authenticator Debug.Assert(storeWrapper.serverOptions.AuthSettings != null); Debug.Assert(storeWrapper.serverOptions.AuthSettings.GetType().BaseType == typeof(AclAuthenticationSettings)); @@ -285,6 +336,12 @@ private bool NetworkAclSave() SendAndReset(); } + if (!ValidateACLAuthenticator()) + return true; + + if (!ValidateACLFileUse()) + return true; + // NOTE: This is temporary as long as ACL operations are only supported when using the ACL authenticator Debug.Assert(storeWrapper.serverOptions.AuthSettings != null); Debug.Assert(storeWrapper.serverOptions.AuthSettings.GetType().BaseType == typeof(AclAuthenticationSettings)); diff --git a/libs/server/Resp/AdminCommands.cs b/libs/server/Resp/AdminCommands.cs index a3ca0e5083..65cc09324c 100644 --- a/libs/server/Resp/AdminCommands.cs +++ b/libs/server/Resp/AdminCommands.cs @@ -6,12 +6,10 @@ using System.Diagnostics; using System.IO; using System.Linq; -using System.Reflection; using System.Runtime.CompilerServices; using System.Text; using Garnet.common; using Garnet.server.Custom; -using Garnet.server.Module; namespace Garnet.server { @@ -22,8 +20,10 @@ internal sealed unsafe partial class 
RespServerSession : ServerSessionBase { private void ProcessAdminCommands(RespCommand command) { - hasAdminCommand = true; - + /* + * WARNING: Here is safe to add @slow commands (check how containsSlowCommand is used). + */ + containsSlowCommand = true; if (_authenticator.CanAuthenticate && !_authenticator.IsAuthenticated) { // If the current session is unauthenticated, we stop parsing, because no other commands are allowed @@ -39,6 +39,7 @@ private void ProcessAdminCommands(RespCommand command) RespCommand.CONFIG_SET => NetworkCONFIG_SET(), RespCommand.FAILOVER or RespCommand.REPLICAOF or + RespCommand.MIGRATE or RespCommand.SECONDARYOF => NetworkProcessClusterCommand(command), RespCommand.LATENCY_HELP => NetworkLatencyHelp(), RespCommand.LATENCY_HISTOGRAM => NetworkLatencyHistogram(), @@ -57,6 +58,7 @@ RespCommand.REPLICAOF or RespCommand.ACL_SAVE => NetworkAclSave(), RespCommand.REGISTERCS => NetworkRegisterCs(storeWrapper.customCommandManager), RespCommand.MODULE_LOADCS => NetworkModuleLoad(storeWrapper.customCommandManager), + RespCommand.PURGEBP => NetworkPurgeBP(), _ => cmdFound = false }; @@ -133,47 +135,36 @@ private bool NetworkMonitor() return true; } - private bool LoadAssemblies(IEnumerable binaryPaths, out IEnumerable loadedAssemblies, out ReadOnlySpan errorMessage) + private bool TryImportCommandsData(string cmdDataPath, out IReadOnlyDictionary cmdNameToData, out ReadOnlySpan errorMessage) where TData : class, IRespCommandData { - loadedAssemblies = null; + cmdNameToData = default; errorMessage = default; - // Get all binary file paths from inputs binary paths - if (!FileUtils.TryGetFiles(binaryPaths, out var files, out _, [".dll", ".exe"], - SearchOption.AllDirectories)) + // Check command info path, if specified + if (!File.Exists(cmdDataPath)) { - errorMessage = CmdStrings.RESP_ERR_GENERIC_GETTING_BINARY_FILES; + errorMessage = CmdStrings.RESP_ERR_GENERIC_GETTING_CMD_INFO_FILE; return false; } - // Check that all binary files are contained in 
allowed binary paths - var binaryFiles = files.ToArray(); - if (binaryFiles.Any(f => - storeWrapper.serverOptions.ExtensionBinPaths.All(p => !FileUtils.IsFileInDirectory(f, p)))) + // Check command info path is in allowed paths + if (storeWrapper.serverOptions.ExtensionBinPaths.All(p => !FileUtils.IsFileInDirectory(cmdDataPath, p))) { - errorMessage = CmdStrings.RESP_ERR_GENERIC_BINARY_FILES_NOT_IN_ALLOWED_PATHS; + errorMessage = CmdStrings.RESP_ERR_GENERIC_CMD_INFO_FILE_NOT_IN_ALLOWED_PATHS; return false; } - // Get all assemblies from binary files - if (!FileUtils.TryLoadAssemblies(binaryFiles, out loadedAssemblies, out _)) - { - errorMessage = CmdStrings.RESP_ERR_GENERIC_LOADING_ASSEMBLIES; - return false; - } + var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.Local); + var commandsInfoProvider = + RespCommandsDataProviderFactory.GetRespCommandsDataProvider(); + + var importSucceeded = commandsInfoProvider.TryImportRespCommandsData(cmdDataPath, + streamProvider, out cmdNameToData, logger); - // If necessary, check that all assemblies are digitally signed - if (!storeWrapper.serverOptions.ExtensionAllowUnsignedAssemblies) + if (!importSucceeded) { - foreach (var loadedAssembly in loadedAssemblies) - { - var publicKey = loadedAssembly.GetName().GetPublicKey(); - if (publicKey == null || publicKey.Length == 0) - { - errorMessage = CmdStrings.RESP_ERR_GENERIC_ASSEMBLY_NOT_SIGNED; - return false; - } - } + errorMessage = CmdStrings.RESP_ERR_GENERIC_MALFORMED_COMMAND_INFO_JSON; + return false; } return true; @@ -184,6 +175,7 @@ private bool LoadAssemblies(IEnumerable binaryPaths, out IEnumerable /// Binary paths from which to load assemblies /// Path of JSON file containing RespCommandsInfo for custom commands + /// Path of JSON file containing RespCommandDocs for custom commands /// Mapping between class names to register and arguments required for registration /// CustomCommandManager instance used to register commands /// If method 
returned false, contains ASCII encoded generic error string; otherwise default @@ -191,44 +183,31 @@ private bool LoadAssemblies(IEnumerable binaryPaths, out IEnumerable binaryPaths, string cmdInfoPath, + string cmdDocsPath, Dictionary> classNameToRegisterArgs, CustomCommandManager customCommandManager, out ReadOnlySpan errorMessage) { errorMessage = default; var classInstances = new Dictionary(); - IReadOnlyDictionary cmdNameToInfo = new Dictionary(); + + IReadOnlyDictionary cmdNameToInfo = default; + IReadOnlyDictionary cmdNameToDocs = default; if (cmdInfoPath != null) { - // Check command info path, if specified - if (!File.Exists(cmdInfoPath)) - { - errorMessage = CmdStrings.RESP_ERR_GENERIC_GETTING_CMD_INFO_FILE; + if (!TryImportCommandsData(cmdInfoPath, out cmdNameToInfo, out errorMessage)) return false; - } - - // Check command info path is in allowed paths - if (storeWrapper.serverOptions.ExtensionBinPaths.All(p => !FileUtils.IsFileInDirectory(cmdInfoPath, p))) - { - errorMessage = CmdStrings.RESP_ERR_GENERIC_CMD_INFO_FILE_NOT_IN_ALLOWED_PATHS; - return false; - } - - var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.Local); - var commandsInfoProvider = RespCommandsInfoProviderFactory.GetRespCommandsInfoProvider(); - - var importSucceeded = commandsInfoProvider.TryImportRespCommandsInfo(cmdInfoPath, - streamProvider, out cmdNameToInfo, logger); + } - if (!importSucceeded) - { - errorMessage = CmdStrings.RESP_ERR_GENERIC_MALFORMED_COMMAND_INFO_JSON; + if (cmdDocsPath != null) + { + if (!TryImportCommandsData(cmdDocsPath, out cmdNameToDocs, out errorMessage)) return false; - } } - if (!LoadAssemblies(binaryPaths, out var loadedAssemblies, out errorMessage)) + if (!ModuleUtils.LoadAssemblies(binaryPaths, storeWrapper.serverOptions.ExtensionBinPaths, + storeWrapper.serverOptions.ExtensionAllowUnsignedAssemblies, out var loadedAssemblies, out errorMessage)) return false; foreach (var c in classNameToRegisterArgs.Keys) @@ -288,9 
+267,15 @@ private bool TryRegisterCustomCommands( foreach (var args in classNameToArgs.Value) { // Add command info to register arguments, if exists - if (cmdNameToInfo.ContainsKey(args.Name)) + if (cmdNameToInfo != null && cmdNameToInfo.TryGetValue(args.Name, out var cmdInfo)) { - args.CommandInfo = cmdNameToInfo[args.Name]; + args.CommandInfo = cmdInfo; + } + + // Add command docs to register arguments, if exists + if (cmdNameToDocs != null && cmdNameToDocs.TryGetValue(args.Name, out var cmdDocs)) + { + args.CommandDocs = cmdDocs; } var registerApi = @@ -328,6 +313,7 @@ private bool NetworkRegisterCs(CustomCommandManager customCommandManager) var binaryPaths = new HashSet(); string cmdInfoPath = default; + string cmdDocsPath = default; // Custom class name to arguments read from each sub-command var classNameToRegisterArgs = new Dictionary>(); @@ -339,9 +325,10 @@ private bool NetworkRegisterCs(CustomCommandManager customCommandManager) // Parse the REGISTERCS command - list of registration sub-commands // followed by an optional path to JSON file containing an array of RespCommandsInfo objects, + // followed by an optional path to JSON file containing an array of RespCommandsDocs objects, // followed by a list of paths to binary files / folders // Syntax - REGISTERCS cmdType name numParams className [expTicks] [objCmdName] [cmdType name numParams className [expTicks] [objCmdName]...] - // [INFO path] SRC path [path ...] + // [INFO path] [DOCS path] SRC path [path ...] 
RegisterArgsBase args = null; var tokenIdx = 0; @@ -377,6 +364,18 @@ private bool NetworkRegisterCs(CustomCommandManager customCommandManager) cmdInfoPath = parseState.GetString(tokenIdx++); continue; } + else if (token.EqualsUpperCaseSpanIgnoringCase(CmdStrings.DOCS)) + { + // If first token is not a cmdType and no other sub-command is previously defined, command is malformed + if (classNameToRegisterArgs.Count == 0 || tokenIdx == parseState.Count) + { + errorMsg = CmdStrings.RESP_ERR_GENERIC_MALFORMED_REGISTERCS_COMMAND; + break; + } + + cmdDocsPath = parseState.GetString(tokenIdx++); + continue; + } else if (readPathsOnly || token.EqualsUpperCaseSpanIgnoringCase(CmdStrings.SRC)) { // If first token is not a cmdType and no other sub-command is previously defined, command is malformed @@ -455,7 +454,7 @@ private bool NetworkRegisterCs(CustomCommandManager customCommandManager) // If no error is found, continue to try register custom commands in the server if (errorMsg.IsEmpty && - TryRegisterCustomCommands(binaryPaths, cmdInfoPath, classNameToRegisterArgs, customCommandManager, out errorMsg)) + TryRegisterCustomCommands(binaryPaths, cmdInfoPath, cmdDocsPath, classNameToRegisterArgs, customCommandManager, out errorMsg)) { while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_OK, ref dcurr, dend)) SendAndReset(); @@ -485,7 +484,8 @@ private bool NetworkModuleLoad(CustomCommandManager customCommandManager) for (var i = 0; i < moduleArgs.Length; i++) moduleArgs[i] = parseState.GetArgSliceByRef(i + 1).ToString(); - if (LoadAssemblies([modulePath], out var loadedAssemblies, out var errorMsg)) + if (ModuleUtils.LoadAssemblies([modulePath], storeWrapper.serverOptions.ExtensionBinPaths, + storeWrapper.serverOptions.ExtensionAllowUnsignedAssemblies, out var loadedAssemblies, out var errorMsg)) { Debug.Assert(loadedAssemblies != null); var assembliesList = loadedAssemblies.ToList(); diff --git a/libs/server/Resp/BasicCommands.cs b/libs/server/Resp/BasicCommands.cs index 
c86ca30b84..3e9177ad06 100644 --- a/libs/server/Resp/BasicCommands.cs +++ b/libs/server/Resp/BasicCommands.cs @@ -1024,6 +1024,57 @@ private bool NetworkCOMMAND_COUNT() return true; } + /// + /// Processes COMMAND INFO subcommand. + /// + /// true if parsing succeeded correctly, false if not all tokens could be consumed and further processing is necessary. + private bool NetworkCOMMAND_DOCS() + { + var count = parseState.Count; + + var resultSb = new StringBuilder(); + var docsCount = 0; + + if (count == 0) + { + if (!RespCommandDocs.TryGetRespCommandsDocs(out var cmdsDocs, true, logger)) + return true; + + foreach (var cmdDocs in cmdsDocs.Values) + { + docsCount++; + resultSb.Append(cmdDocs.RespFormat); + } + + foreach (var customCmd in storeWrapper.customCommandManager.CustomCommandsDocs.Values) + { + docsCount++; + resultSb.Append(customCmd.RespFormat); + } + } + else + { + for (var i = 0; i < count; i++) + { + var cmdName = parseState.GetString(i); + if (RespCommandDocs.TryGetRespCommandDocs(cmdName, out var cmdDocs, true, true, logger) || + storeWrapper.customCommandManager.TryGetCustomCommandDocs(cmdName, out cmdDocs)) + { + docsCount++; + resultSb.Append(cmdDocs.RespFormat); + } + } + } + + while (!RespWriteUtils.WriteArrayLength(docsCount * 2, ref dcurr, dend)) + SendAndReset(); + + while (!RespWriteUtils.WriteAsciiDirect(resultSb.ToString(), ref dcurr, dend)) + SendAndReset(); + + return true; + } + /// /// Processes COMMAND INFO subcommand. 
/// @@ -1045,7 +1096,7 @@ private bool NetworkCOMMAND_INFO() { var cmdName = parseState.GetString(i); - if (RespCommandsInfo.TryGetRespCommandInfo(cmdName, out var cmdInfo, logger) || + if (RespCommandsInfo.TryGetRespCommandInfo(cmdName, out var cmdInfo, true, true, logger) || storeWrapper.customCommandManager.TryGetCustomCommandInfo(cmdName, out cmdInfo)) { while (!RespWriteUtils.WriteAsciiDirect(cmdInfo.RespFormat, ref dcurr, dend)) @@ -1483,5 +1534,64 @@ void ExecuteFlushDb(bool unsafeTruncateLog) storeWrapper.store.Log.ShiftBeginAddress(storeWrapper.store.Log.TailAddress, truncateLog: unsafeTruncateLog); storeWrapper.objectStore?.Log.ShiftBeginAddress(storeWrapper.objectStore.Log.TailAddress, truncateLog: unsafeTruncateLog); } + + /// + /// Writes a string describing the given session into the string builder. + /// Does not append a new line. + /// + /// Not all Redis fields are written as they do not all have Garnet equivalents. + /// + private static void WriteClientInfo(IClusterProvider provider, StringBuilder into, RespServerSession targetSession, long nowMilliseconds) + { + var id = targetSession.Id; + var remoteEndpoint = targetSession.networkSender.RemoteEndpointName; + var localEndpoint = targetSession.networkSender.LocalEndpointName; + var clientName = targetSession.clientName; + var user = targetSession._user; + var resp = targetSession.respProtocolVersion; + var nodeId = targetSession?.clusterSession?.RemoteNodeId; + + into.Append($"id={id}"); + into.Append($" addr={remoteEndpoint}"); + into.Append($" laddr={localEndpoint}"); + if (clientName is not null) + { + into.Append($" name={clientName}"); + } + + var ageSec = (nowMilliseconds - targetSession.CreationTicks) / 1_000; + + into.Append($" age={ageSec}"); + + if (user is not null) + { + into.Append($" user={user.Name}"); + } + + if (provider is not null && nodeId is not null) + { + if (provider.IsReplica(nodeId)) + { + into.Append($" flags=S"); + } + else + { + into.Append($" flags=M"); + } + } + 
else + { + if (targetSession.isSubscriptionSession) + { + into.Append($" flags=P"); + } + else + { + into.Append($" flags=N"); + } + } + + into.Append($" resp={resp}"); + } } } \ No newline at end of file diff --git a/libs/server/Resp/ClientCommands.cs b/libs/server/Resp/ClientCommands.cs new file mode 100644 index 0000000000..2917264467 --- /dev/null +++ b/libs/server/Resp/ClientCommands.cs @@ -0,0 +1,499 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using System; +using System.Buffers; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using Garnet.common; +using Microsoft.Extensions.Logging; + +namespace Garnet.server +{ + /// + /// Server session for RESP protocol - client commands are in this file + /// + internal sealed unsafe partial class RespServerSession : ServerSessionBase + { + /// + /// CLIENT LIST + /// + private bool NetworkCLIENTLIST() + { + if (Server is GarnetServerBase garnetServer) + { + IEnumerable toInclude; + RespServerSession[] rentedBuffer = null; + + try + { + if (parseState.Count == 0) + { + toInclude = garnetServer.ActiveConsumers().OfType(); + } + else if (parseState.Count < 2) + { + return AbortWithErrorMessage(CmdStrings.RESP_SYNTAX_ERROR); + } + else + { + ref var filter = ref parseState.GetArgSliceByRef(0); + AsciiUtils.ToUpperInPlace(filter.Span); + + if (filter.Span.SequenceEqual(CmdStrings.TYPE)) + { + if (parseState.Count != 2) + { + return AbortWithErrorMessage(CmdStrings.RESP_SYNTAX_ERROR); + } + + var invalidType = + !parseState.TryGetEnum(1, true, out var clientType) || + (clientType == ClientType.SLAVE) || // SLAVE is not legal as CLIENT|LIST was introduced after the SLAVE -> REPLICA rename + !clientType.IsValid(ref parseState.GetArgSliceByRef(1)); + + if (invalidType) + { + var type = parseState.GetString(1); + return AbortWithErrorMessage(Encoding.UTF8.GetBytes(string.Format(CmdStrings.GenericUnknownClientType, type))); + } + + toInclude = + garnetServer + 
.ActiveConsumers() + .OfType() + .Where( + r => + { + ClientType effectiveType; + if (storeWrapper.clusterProvider is not null && r.clusterSession.RemoteNodeId is not null) + { + if (storeWrapper.clusterProvider.IsReplica(r.clusterSession.RemoteNodeId)) + { + effectiveType = ClientType.REPLICA; + } + else + { + effectiveType = ClientType.MASTER; + } + } + else + { + effectiveType = r.isSubscriptionSession ? ClientType.PUBSUB : ClientType.NORMAL; + } + + return effectiveType == clientType; + } + ); + } + else if (filter.Span.SequenceEqual(CmdStrings.ID)) + { + // Try and put all the ids onto the stack, if the count is small + var numIds = parseState.Count - 1; + + Span ids = stackalloc long[32]; + long[] rentedIds; + + if (numIds <= ids.Length) + { + ids = ids[..numIds]; + rentedIds = null; + } + else + { + rentedIds = ArrayPool.Shared.Rent(numIds); + ids = rentedIds[..numIds]; + } + + try + { + for (var idIx = 1; idIx < parseState.Count; idIx++) + { + if (!parseState.TryGetLong(idIx, out var id)) + { + return AbortWithErrorMessage(CmdStrings.RESP_ERR_INVALID_CLIENT_ID); + } + + ids[idIx - 1] = id; + } + + var respIx = 0; + rentedBuffer = ArrayPool.Shared.Rent(ids.Length); + + foreach (var consumer in garnetServer.ActiveConsumers()) + { + if (consumer is RespServerSession session && ids.IndexOf(session.Id) != -1) + { + rentedBuffer[respIx] = session; + respIx++; + } + } + + toInclude = respIx == rentedBuffer.Length ? 
rentedBuffer : rentedBuffer.Take(respIx); + } + finally + { + if (rentedIds is not null) + { + ArrayPool.Shared.Return(rentedIds); + } + } + } + else + { + return AbortWithErrorMessage(CmdStrings.RESP_SYNTAX_ERROR); + } + } + + var nowMilliseconds = Environment.TickCount64; + var clusterProvider = this.storeWrapper.clusterProvider; + var resultSb = new StringBuilder(); + var first = true; + foreach (var resp in toInclude) + { + if (!first) + { + // Redis uses a single \n, not \r\n like you might expect + resultSb.Append("\n"); + } + + WriteClientInfo(clusterProvider, resultSb, resp, nowMilliseconds); + first = false; + } + + var result = resultSb.ToString(); + while (!RespWriteUtils.WriteUtf8BulkString(result, ref dcurr, dend)) + SendAndReset(); + + return true; + } + finally + { + if (rentedBuffer is not null) + { + ArrayPool.Shared.Return(rentedBuffer); + } + } + } + else + { + return AbortWithErrorMessage(CmdStrings.RESP_ERR_CANNOT_LIST_CLIENTS); + } + } + + /// + /// CLIENT INFO + /// + /// + private bool NetworkCLIENTINFO() + { + if (parseState.Count != 0) + { + return AbortWithWrongNumberOfArguments("client|info"); + } + + var resultSb = new StringBuilder(); + WriteClientInfo(storeWrapper.clusterProvider, resultSb, this, Environment.TickCount64); + + var result = resultSb.ToString(); + while (!RespWriteUtils.WriteSimpleString(result, ref dcurr, dend)) + SendAndReset(); + + return true; + } + + /// + /// CLIENT KILL + /// + private bool NetworkCLIENTKILL() + { + if (Server is GarnetServerBase garnetServer) + { + if (parseState.Count == 0) + { + // Nothing takes 0 args + + return AbortWithWrongNumberOfArguments("CLIENT|KILL"); + } + else if (parseState.Count == 1) + { + // Old ip:port format + + var target = parseState.GetString(0); + + foreach (var consumer in garnetServer.ActiveConsumers()) + { + if (consumer is RespServerSession session) + { + if (session.networkSender.RemoteEndpointName == target) + { + _ = session.TryKill(); + + while 
(!RespWriteUtils.WriteDirect(CmdStrings.RESP_OK, ref dcurr, dend)) + SendAndReset(); + + return true; + } + } + } + + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_NO_SUCH_CLIENT, ref dcurr, dend)) + SendAndReset(); + + return true; + } + else + { + // New filter + value format + + long? id = null; + ClientType? type = null; + string user = null; + string addr = null; + string lAddr = null; + bool? skipMe = null; + long? maxAge = null; + + // Parse out all the filters + var argIx = 0; + while (argIx < parseState.Count) + { + if (argIx + 1 >= parseState.Count) + { + return AbortWithWrongNumberOfArguments("CLIENT|KILL"); + } + + ref var filter = ref parseState.GetArgSliceByRef(argIx); + var filterSpan = filter.Span; + + ref var value = ref parseState.GetArgSliceByRef(argIx + 1); + + AsciiUtils.ToUpperInPlace(filterSpan); + + if (filterSpan.SequenceEqual(CmdStrings.ID)) + { + if (!ParseUtils.TryReadLong(ref value, out var idParsed)) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericParamShouldBeGreaterThanZero, "client-id"))); + } + + if (id is not null) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrDuplicateFilter, "ID"))); + } + + id = idParsed; + } + else if (filterSpan.SequenceEqual(CmdStrings.TYPE)) + { + if (type is not null) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrDuplicateFilter, "TYPE"))); + } + + var unknownType = + !Enum.TryParse(ParseUtils.ReadString(ref value), true, out var typeParsed) || + !typeParsed.IsValid(ref value); + + if (unknownType) + { + var typeStr = ParseUtils.ReadString(ref value); + return AbortWithErrorMessage(Encoding.UTF8.GetBytes(string.Format(CmdStrings.GenericUnknownClientType, typeStr))); + } + + // Map SLAVE -> REPLICA for easier checking later + typeParsed = typeParsed == ClientType.SLAVE ? 
ClientType.REPLICA : typeParsed; + + type = typeParsed; + } + else if (filterSpan.SequenceEqual(CmdStrings.USER)) + { + if (user is not null) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrDuplicateFilter, "USER"))); + } + + user = ParseUtils.ReadString(ref value); + } + else if (filterSpan.SequenceEqual(CmdStrings.ADDR)) + { + if (addr is not null) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrDuplicateFilter, "ADDR"))); + } + + addr = ParseUtils.ReadString(ref value); + } + else if (filterSpan.SequenceEqual(CmdStrings.LADDR)) + { + if (lAddr is not null) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrDuplicateFilter, "LADDR"))); + } + + lAddr = ParseUtils.ReadString(ref value); + } + else if (filterSpan.SequenceEqual(CmdStrings.SKIPME)) + { + if (skipMe is not null) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrDuplicateFilter, "SKIPME"))); + } + + AsciiUtils.ToUpperInPlace(value.Span); + + if (value.Span.SequenceEqual(CmdStrings.YES)) + { + skipMe = true; + } + else if (value.Span.SequenceEqual(CmdStrings.NO)) + { + skipMe = false; + } + else + { + return AbortWithErrorMessage(CmdStrings.RESP_ERR_GENERIC_SYNTAX_ERROR); + } + } + else if (filterSpan.SequenceEqual(CmdStrings.MAXAGE)) + { + if (!ParseUtils.TryReadLong(ref value, out var maxAgeParsed)) + { + return AbortWithErrorMessage(CmdStrings.RESP_ERR_GENERIC_SYNTAX_ERROR); + } + + if (maxAge is not null) + { + return AbortWithErrorMessage(Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrDuplicateFilter, "MAXAGE"))); + } + + maxAge = maxAgeParsed; + } + else + { + return AbortWithErrorMessage(CmdStrings.RESP_ERR_GENERIC_SYNTAX_ERROR); + } + + argIx += 2; + } + + // SKIPME defaults to true + skipMe ??= true; + + logger?.LogInformation("Killing all sessions with id={id}, type={type}, user={user}, addr={addr}, 
laddr={lAddr}, maxAge={maxAge}, skipMe={skipMe}", id, type, user, addr, lAddr, maxAge, skipMe); + + var nowMilliseconds = Environment.TickCount64; + + // Actually go an kill matching ressions + var killed = 0; + foreach (var consumer in garnetServer.ActiveConsumers()) + { + if (consumer is RespServerSession session) + { + if (!IsMatch(storeWrapper.clusterProvider, this, nowMilliseconds, session, id, type, user, addr, lAddr, maxAge, skipMe.Value)) + { + continue; + } + + logger?.LogInformation("Attempting to kill session {Id}", session.Id); + + if (session.TryKill()) + { + logger?.LogInformation("Killed session {Id}", session.Id); + + killed++; + } + } + } + + // Hand back result, which is count of clients _actually_ killed + while (!RespWriteUtils.WriteInteger(killed, ref dcurr, dend)) + SendAndReset(); + + return true; + } + } + else + { + return AbortWithErrorMessage(CmdStrings.RESP_ERR_CANNOT_LIST_CLIENTS); + } + + // Returns true if the TARGET session is a match for all the given filter values + static bool IsMatch( + IClusterProvider clusterProvider, + RespServerSession currentSession, + long nowMilliseconds, + RespServerSession targetSession, + long? id, + ClientType? type, + string user, + string addr, + string lAddr, + long? maxAge, + bool skipMe + ) + { + if (skipMe && ReferenceEquals(currentSession, targetSession)) + { + return false; + } + + var matches = true; + if (id.HasValue) + { + matches &= id.Value == targetSession.Id; + } + + if (type is not null) + { + ClientType targetType; + if (clusterProvider is not null && targetSession.clusterSession?.RemoteNodeId is not null) + { + if (clusterProvider.IsReplica(targetSession.clusterSession.RemoteNodeId)) + { + targetType = ClientType.REPLICA; + } + else + { + targetType = ClientType.MASTER; + } + } + else + { + targetType = targetSession.isSubscriptionSession ? 
ClientType.PUBSUB : ClientType.NORMAL; + } + + matches &= type.Value == targetType; + } + + if (user is not null) + { + // Using an ORDINAL match to fail-safe, if unicode normalization would change either name I'd prefer to not-match + matches &= user.Equals(targetSession._user?.Name, StringComparison.Ordinal); + } + + if (addr is not null) + { + // Same logic, using ORDINAL to fail-safe + matches &= targetSession.networkSender.RemoteEndpointName.Equals(addr, StringComparison.Ordinal); + } + + if (lAddr is not null) + { + // And again, ORDINAL + matches &= targetSession.networkSender.LocalEndpointName.Equals(lAddr, StringComparison.Ordinal); + } + + if (maxAge is not null) + { + var targeAge = (nowMilliseconds - targetSession.CreationTicks) / 1_000; + + matches &= targeAge > maxAge.Value; + } + + return matches; + } + } + } +} \ No newline at end of file diff --git a/libs/server/Resp/CmdStrings.cs b/libs/server/Resp/CmdStrings.cs index 5defc72ab6..da69aba555 100644 --- a/libs/server/Resp/CmdStrings.cs +++ b/libs/server/Resp/CmdStrings.cs @@ -40,6 +40,7 @@ static partial class CmdStrings public static ReadOnlySpan LATENCY => "LATENCY"u8; public static ReadOnlySpan CLUSTER => "CLUSTER"u8; public static ReadOnlySpan MIGRATE => "MIGRATE"u8; + public static ReadOnlySpan PURGEBP => "PURGEBP"u8; public static ReadOnlySpan FAILOVER => "FAILOVER"u8; public static ReadOnlySpan HISTOGRAM => "HISTOGRAM"u8; public static ReadOnlySpan histogram => "histogram"u8; @@ -94,6 +95,10 @@ static partial class CmdStrings public static ReadOnlySpan XX => "XX"u8; public static ReadOnlySpan UNSAFETRUNCATELOG => "UNSAFETRUNCATELOG"u8; public static ReadOnlySpan SAMPLES => "SAMPLES"u8; + public static ReadOnlySpan RANK => "RANK"u8; + public static ReadOnlySpan rank => "rank"u8; + public static ReadOnlySpan MAXLEN => "MAXLEN"u8; + public static ReadOnlySpan maxlen => "maxlen"u8; /// /// Response strings @@ -173,6 +178,14 @@ static partial class CmdStrings public static ReadOnlySpan 
RESP_ERR_MODULE_ONLOAD => "ERR Error during module OnLoad"u8; public static ReadOnlySpan RESP_ERR_LIMIT_NOT_SUPPORTED => "ERR syntax error, LIMIT is only supported in combination with either BYSCORE or BYLEX"u8; public static ReadOnlySpan RESP_ERR_NO_SCRIPT => "NOSCRIPT No matching script. Please use EVAL."u8; + public static ReadOnlySpan RESP_ERR_CANNOT_LIST_CLIENTS => "ERR Clients cannot be listed."u8; + public static ReadOnlySpan RESP_ERR_NO_SUCH_CLIENT => "ERR No such client"u8; + public static ReadOnlySpan RESP_ERR_INVALID_CLIENT_ID => "ERR Invalid client ID"u8; + public static ReadOnlySpan RESP_ERR_ACL_AUTH_DISABLED => "ERR ACL Authenticator is disabled."u8; + public static ReadOnlySpan RESP_ERR_ACL_AUTH_FILE_DISABLED => "ERR This Garnet instance is not configured to use an ACL file. Please restart server with --acl-file option."u8; + public static ReadOnlySpan RESP_ERR_XX_NX_NOT_COMPATIBLE => "ERR XX and NX options at the same time are not compatible"u8; + public static ReadOnlySpan RESP_ERR_GT_LT_NX_NOT_COMPATIBLE => "ERR GT, LT, and/or NX options at the same time are not compatible"u8; + public static ReadOnlySpan RESP_ERR_INCR_SUPPORTS_ONLY_SINGLE_PAIR => "ERR INCR option supports a single increment-element pair"u8; /// /// Response string templates @@ -185,6 +198,8 @@ static partial class CmdStrings "ERR Invalid number of parameters to stored proc {0}, expected {1}, actual {2}"; public const string GenericSyntaxErrorOption = "ERR Syntax error in {0} option '{1}'"; public const string GenericParamShouldBeGreaterThanZero = "ERR {0} should be greater than 0"; + public const string GenericUnknownClientType = "ERR Unknown client type '{0}'"; + public const string GenericErrDuplicateFilter = "ERR Filter '{0}' defined multiple times"; /// /// Response errors while scripting @@ -256,6 +271,15 @@ static partial class CmdStrings public static ReadOnlySpan SLOTS => "SLOTS"u8; public static ReadOnlySpan REPLICAS => "REPLICAS"u8; public static ReadOnlySpan REPLICATE 
=> "REPLICATE"u8; + public static ReadOnlySpan ID => "ID"u8; + public static ReadOnlySpan KILL => "KILL"u8; + public static ReadOnlySpan USER => "USER"u8; + public static ReadOnlySpan ADDR => "ADDR"u8; + public static ReadOnlySpan LADDR => "LADDR"u8; + public static ReadOnlySpan SKIPME => "SKIPME"u8; + public static ReadOnlySpan MAXAGE => "MAXAGE"u8; + public static ReadOnlySpan YES => "YES"u8; + public static ReadOnlySpan NO => "NO"u8; // Cluster subcommands which are internal and thus undocumented // diff --git a/libs/server/Resp/KeyAdminCommands.cs b/libs/server/Resp/KeyAdminCommands.cs index 2365e9bb27..4d2aaf6ddc 100644 --- a/libs/server/Resp/KeyAdminCommands.cs +++ b/libs/server/Resp/KeyAdminCommands.cs @@ -40,6 +40,37 @@ private bool NetworkRENAME(ref TGarnetApi storageApi) return true; } + /// + /// TryRENAMENX + /// + private bool NetworkRENAMENX(ref TGarnetApi storageApi) + where TGarnetApi : IGarnetApi + { + if (parseState.Count != 2) + { + return AbortWithWrongNumberOfArguments(nameof(RespCommand.RENAMENX)); + } + + var oldKeySlice = parseState.GetArgSliceByRef(0); + var newKeySlice = parseState.GetArgSliceByRef(1); + var status = storageApi.RENAMENX(oldKeySlice, newKeySlice, out var result); + + if (status == GarnetStatus.OK) + { + // Integer reply: 1 if key was renamed to newkey. + // Integer reply: 0 if newkey already exists. 
+ while (!RespWriteUtils.WriteInteger(result, ref dcurr, dend)) + SendAndReset(); + } + else + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_GENERIC_NOSUCHKEY, ref dcurr, dend)) + SendAndReset(); + } + + return true; + } + /// /// GETDEL command processor /// @@ -106,6 +137,32 @@ private bool NetworkEXISTS(ref TGarnetApi storageApi) return true; } + bool TryGetExpireOption(ReadOnlySpan item, out ExpireOption option) + { + if (item.EqualsUpperCaseSpanIgnoringCase("NX"u8)) + { + option = ExpireOption.NX; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("XX"u8)) + { + option = ExpireOption.XX; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("GT"u8)) + { + option = ExpireOption.GT; + return true; + } + if (item.EqualsUpperCaseSpanIgnoringCase("LT"u8)) + { + option = ExpireOption.LT; + return true; + } + option = ExpireOption.None; + return false; + } + /// /// Set a timeout on a key. /// @@ -117,7 +174,7 @@ private bool NetworkEXPIRE(RespCommand command, ref TGarnetApi stora where TGarnetApi : IGarnetApi { var count = parseState.Count; - if (count < 2 || count > 3) + if (count < 2 || count > 4) { return AbortWithWrongNumberOfArguments(nameof(RespCommand.EXPIRE)); } @@ -138,7 +195,7 @@ private bool NetworkEXPIRE(RespCommand command, ref TGarnetApi stora if (parseState.Count > 2) { - if (!parseState.TryGetEnum(2, true, out expireOption) || !expireOption.IsValid(ref parseState.GetArgSliceByRef(2))) + if (!TryGetExpireOption(parseState.GetArgSliceByRef(2).ReadOnlySpan, out expireOption)) { var optionStr = parseState.GetString(2); @@ -148,6 +205,36 @@ private bool NetworkEXPIRE(RespCommand command, ref TGarnetApi stora } } + if (parseState.Count > 3) + { + if (!TryGetExpireOption(parseState.GetArgSliceByRef(3).ReadOnlySpan, out var additionExpireOption)) + { + var optionStr = parseState.GetString(3); + + while (!RespWriteUtils.WriteError($"ERR Unsupported option {optionStr}", ref dcurr, dend)) + SendAndReset(); + return true; + } + + 
if (expireOption == ExpireOption.XX && (additionExpireOption == ExpireOption.GT || additionExpireOption == ExpireOption.LT)) + { + expireOption = ExpireOption.XX | additionExpireOption; + } + else if (expireOption == ExpireOption.GT && additionExpireOption == ExpireOption.XX) + { + expireOption = ExpireOption.XXGT; + } + else if (expireOption == ExpireOption.LT && additionExpireOption == ExpireOption.XX) + { + expireOption = ExpireOption.XXLT; + } + else + { + while (!RespWriteUtils.WriteError("ERR NX and XX, GT or LT options at the same time are not compatible", ref dcurr, dend)) + SendAndReset(); + } + } + var status = command == RespCommand.EXPIRE ? storageApi.EXPIRE(key, expiryMs, out var timeoutSet, StoreType.All, expireOption) : storageApi.PEXPIRE(key, expiryMs, out timeoutSet, StoreType.All, expireOption); @@ -166,6 +253,92 @@ private bool NetworkEXPIRE(RespCommand command, ref TGarnetApi stora return true; } + /// + /// Set a timeout on a key based on unix timestamp + /// + /// + /// Indicates which command to use, expire or pexpire. 
+ /// + /// + private bool NetworkEXPIREAT(RespCommand command, ref TGarnetApi storageApi) + where TGarnetApi : IGarnetApi + { + var count = parseState.Count; + if (count < 2 || count > 4) + { + return AbortWithWrongNumberOfArguments(nameof(RespCommand.EXPIREAT)); + } + + var key = parseState.GetArgSliceByRef(0); + if (!parseState.TryGetLong(1, out var expiryTimestamp)) + { + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER, ref dcurr, dend)) + SendAndReset(); + return true; + } + + var expireOption = ExpireOption.None; + + if (parseState.Count > 2) + { + if (!TryGetExpireOption(parseState.GetArgSliceByRef(2).ReadOnlySpan, out expireOption)) + { + var optionStr = parseState.GetString(2); + + while (!RespWriteUtils.WriteError($"ERR Unsupported option {optionStr}", ref dcurr, dend)) + SendAndReset(); + return true; + } + } + + if (parseState.Count > 3) + { + if (!TryGetExpireOption(parseState.GetArgSliceByRef(3).ReadOnlySpan, out var additionExpireOption)) + { + var optionStr = parseState.GetString(3); + + while (!RespWriteUtils.WriteError($"ERR Unsupported option {optionStr}", ref dcurr, dend)) + SendAndReset(); + return true; + } + + if (expireOption == ExpireOption.XX && (additionExpireOption == ExpireOption.GT || additionExpireOption == ExpireOption.LT)) + { + expireOption = ExpireOption.XX | additionExpireOption; + } + else if (expireOption == ExpireOption.GT && additionExpireOption == ExpireOption.XX) + { + expireOption = ExpireOption.XXGT; + } + else if (expireOption == ExpireOption.LT && additionExpireOption == ExpireOption.XX) + { + expireOption = ExpireOption.XXLT; + } + else + { + while (!RespWriteUtils.WriteError("ERR NX and XX, GT or LT options at the same time are not compatible", ref dcurr, dend)) + SendAndReset(); + } + } + + var status = command == RespCommand.EXPIREAT ? 
+ storageApi.EXPIREAT(key, expiryTimestamp, out var timeoutSet, StoreType.All, expireOption) : + storageApi.PEXPIREAT(key, expiryTimestamp, out timeoutSet, StoreType.All, expireOption); + + if (status == GarnetStatus.OK && timeoutSet) + { + while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_RETURN_VAL_1, ref dcurr, dend)) + SendAndReset(); + } + else + { + while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_RETURN_VAL_0, ref dcurr, dend)) + SendAndReset(); + } + + return true; + } + /// /// PERSIST command /// @@ -231,5 +404,41 @@ private bool NetworkTTL(RespCommand command, ref TGarnetApi storageA } return true; } + + /// + /// Get the absolute Unix timestamp at which the given key will expire. + /// + /// + /// either if the call is for EXPIRETIME or PEXPIRETIME command + /// + /// Returns the absolute Unix timestamp (since January 1, 1970) in seconds or milliseconds at which the given key will expire. + private bool NetworkEXPIRETIME(RespCommand command, ref TGarnetApi storageApi) + where TGarnetApi : IGarnetApi + { + if (parseState.Count != 1) + { + return AbortWithWrongNumberOfArguments(nameof(RespCommand.EXPIRETIME)); + } + + var sbKey = parseState.GetArgSliceByRef(0).SpanByte; + var o = new SpanByteAndMemory(dcurr, (int)(dend - dcurr)); + var status = command == RespCommand.EXPIRETIME ? 
+ storageApi.EXPIRETIME(ref sbKey, StoreType.All, ref o) : + storageApi.PEXPIRETIME(ref sbKey, StoreType.All, ref o); + + if (status == GarnetStatus.OK) + { + if (!o.IsSpanByte) + SendAndReset(o.Memory, o.Length); + else + dcurr += o.Length; + } + else + { + while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_RETURN_VAL_N2, ref dcurr, dend)) + SendAndReset(); + } + return true; + } } } \ No newline at end of file diff --git a/libs/server/Resp/LocalServerSession.cs b/libs/server/Resp/LocalServerSession.cs index 79f670f99a..a507d13a1c 100644 --- a/libs/server/Resp/LocalServerSession.cs +++ b/libs/server/Resp/LocalServerSession.cs @@ -48,7 +48,7 @@ public LocalServerSession(StoreWrapper storeWrapper) this.scratchBufferManager = new ScratchBufferManager(); // Create storage session and API - this.storageSession = new StorageSession(storeWrapper, scratchBufferManager, sessionMetrics, LatencyMetrics, null, logger); + this.storageSession = new StorageSession(storeWrapper, scratchBufferManager, sessionMetrics, LatencyMetrics, logger); this.BasicGarnetApi = new BasicGarnetApi(storageSession, storageSession.basicContext, storageSession.objectStoreBasicContext); } diff --git a/libs/server/Resp/Objects/ListCommands.cs b/libs/server/Resp/Objects/ListCommands.cs index 56d1e086aa..159b00feec 100644 --- a/libs/server/Resp/Objects/ListCommands.cs +++ b/libs/server/Resp/Objects/ListCommands.cs @@ -156,6 +156,65 @@ private unsafe bool ListPop(RespCommand command, ref TGarnetApi stor return true; } + /// + /// The command returns the index of matching elements inside a Redis list. + /// By default, when no options are given, it will scan the list from head to tail, looking for the first match of "element". 
+ /// + /// + /// + /// + private unsafe bool ListPosition(ref TGarnetApi storageApi) + where TGarnetApi : IGarnetApi + { + if (parseState.Count < 2) + { + return AbortWithWrongNumberOfArguments(nameof(RespCommand.LPOS)); + } + + // Get the key for List + var sbKey = parseState.GetArgSliceByRef(0).SpanByte; + var element = parseState.GetArgSliceByRef(1).SpanByte; + var keyBytes = sbKey.ToByteArray(); + + if (NetworkSingleKeySlotVerify(keyBytes, false)) + { + return true; + } + + // Prepare input + var input = new ObjectInput + { + header = new RespInputHeader + { + type = GarnetObjectType.List, + ListOp = ListOperation.LPOS, + }, + parseState = parseState, + parseStateStartIdx = 1, + }; + + // Prepare GarnetObjectStore output + var outputFooter = new GarnetObjectStoreOutput { spanByteAndMemory = new SpanByteAndMemory(dcurr, (int)(dend - dcurr)) }; + + var statusOp = storageApi.ListPosition(keyBytes, ref input, ref outputFooter); + + switch (statusOp) + { + case GarnetStatus.OK: + ProcessOutputWithHeader(outputFooter.spanByteAndMemory); + break; + case GarnetStatus.NOTFOUND: + while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_ERRNOTFOUND, ref dcurr, dend)) + SendAndReset(); + break; + case GarnetStatus.WRONGTYPE: + while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_WRONG_TYPE, ref dcurr, dend)) + SendAndReset(); + break; + } + + return true; + } /// /// LMPOP numkeys key [key ...] 
LEFT | RIGHT [COUNT count] @@ -278,7 +337,10 @@ private bool ListBlockingPop(RespCommand command) return true; } - var result = itemBroker.GetCollectionItemAsync(command, keysBytes, this, timeout).Result; + if (storeWrapper.itemBroker == null) + throw new GarnetException("Object store is disabled"); + + var result = storeWrapper.itemBroker.GetCollectionItemAsync(command, keysBytes, this, timeout).Result; if (!result.Found) { @@ -347,7 +409,10 @@ private unsafe bool ListBlockingMove(RespCommand command) return true; } - var result = itemBroker.MoveCollectionItemAsync(command, srcKey.ToArray(), this, timeout, + if (storeWrapper.itemBroker == null) + throw new GarnetException("Object store is disabled"); + + var result = storeWrapper.itemBroker.MoveCollectionItemAsync(command, srcKey.ToArray(), this, timeout, cmdArgs).Result; if (!result.Found) diff --git a/libs/server/Resp/Objects/SortedSetCommands.cs b/libs/server/Resp/Objects/SortedSetCommands.cs index 584a82d8a1..fdb15b79ab 100644 --- a/libs/server/Resp/Objects/SortedSetCommands.cs +++ b/libs/server/Resp/Objects/SortedSetCommands.cs @@ -27,11 +27,6 @@ private unsafe bool SortedSetAdd(ref TGarnetApi storageApi) return AbortWithWrongNumberOfArguments("ZADD"); } - if (parseState.Count % 2 != 1) - { - return AbortWithErrorMessage(CmdStrings.RESP_ERR_GENERIC_SYNTAX_ERROR); - } - // Get the key for SortedSet var sbKey = parseState.GetArgSliceByRef(0).SpanByte; var keyBytes = sbKey.ToByteArray(); diff --git a/libs/server/Resp/Parser/RespCommand.cs b/libs/server/Resp/Parser/RespCommand.cs index 4d84c304f2..1773d7a3e2 100644 --- a/libs/server/Resp/Parser/RespCommand.cs +++ b/libs/server/Resp/Parser/RespCommand.cs @@ -25,6 +25,7 @@ public enum RespCommand : byte COSCAN, DBSIZE, EXISTS, + EXPIRETIME, GEODIST, GEOHASH, GEOPOS, @@ -45,9 +46,11 @@ public enum RespCommand : byte KEYS, LINDEX, LLEN, + LPOS, LRANGE, MEMORY_USAGE, MGET, + PEXPIRETIME, PFCOUNT, PTTL, SCAN, @@ -84,6 +87,7 @@ public enum RespCommand : byte DECRBY, 
DEL, EXPIRE, + EXPIREAT, FLUSHALL, FLUSHDB, GEOADD, @@ -113,10 +117,12 @@ public enum RespCommand : byte MSETNX, PERSIST, PEXPIRE, + PEXPIREAT, PFADD, PFMERGE, PSETEX, RENAME, + RENAMENX, RPOP, RPOPLPUSH, RPUSH, @@ -146,7 +152,7 @@ public enum RespCommand : byte ZREMRANGEBYRANK, ZREMRANGEBYSCORE, - // BITOP is the true command, AND|OR|XOR|NOT are psuedo-subcommands + // BITOP is the true command, AND|OR|XOR|NOT are pseudo-subcommands BITOP, BITOP_AND, BITOP_OR, @@ -166,7 +172,12 @@ public enum RespCommand : byte ASKING, SELECT, ECHO, + CLIENT, + CLIENT_ID, + CLIENT_INFO, + CLIENT_LIST, + CLIENT_KILL, MONITOR, MODULE, @@ -191,6 +202,7 @@ public enum RespCommand : byte BGSAVE, COMMITAOF, FORCEGC, + PURGEBP, FAILOVER, // Custom commands @@ -216,6 +228,7 @@ public enum RespCommand : byte COMMAND, COMMAND_COUNT, + COMMAND_DOCS, COMMAND_INFO, MEMORY, @@ -299,7 +312,6 @@ public static class RespCommandExtensions RespCommand.PING, RespCommand.SELECT, RespCommand.ECHO, - RespCommand.CLIENT, RespCommand.MONITOR, RespCommand.MODULE_LOADCS, RespCommand.REGISTERCS, @@ -315,9 +327,15 @@ public static class RespCommandExtensions RespCommand.ACL_SETUSER, RespCommand.ACL_USERS, RespCommand.ACL_WHOAMI, + // Client + RespCommand.CLIENT_ID, + RespCommand.CLIENT_INFO, + RespCommand.CLIENT_LIST, + RespCommand.CLIENT_KILL, // Command RespCommand.COMMAND, RespCommand.COMMAND_COUNT, + RespCommand.COMMAND_DOCS, RespCommand.COMMAND_INFO, RespCommand.MEMORY_USAGE, // Config @@ -426,6 +444,8 @@ public static bool IsDataCommand(this RespCommand cmd) RespCommand.MEMORY_USAGE => false, RespCommand.FLUSHDB => false, RespCommand.FLUSHALL => false, + RespCommand.KEYS => false, + RespCommand.SCAN => false, _ => cmd >= FirstReadCommand() && cmd <= LastWriteCommand() }; } @@ -609,6 +629,7 @@ private RespCommand FastParseCommand(out int count) (2 << 4) | 6 when lastWord == MemoryMarshal.Read("INCRBY\r\n"u8) => RespCommand.INCRBY, (2 << 4) | 6 when lastWord == MemoryMarshal.Read("DECRBY\r\n"u8) => 
RespCommand.DECRBY, (2 << 4) | 6 when lastWord == MemoryMarshal.Read("RENAME\r\n"u8) => RespCommand.RENAME, + (2 << 4) | 8 when lastWord == MemoryMarshal.Read("NAMENX\r\n"u8) && *(ushort*)(ptr + 8) == MemoryMarshal.Read("RE"u8) => RespCommand.RENAMENX, (2 << 4) | 6 when lastWord == MemoryMarshal.Read("GETBIT\r\n"u8) => RespCommand.GETBIT, (2 << 4) | 6 when lastWord == MemoryMarshal.Read("APPEND\r\n"u8) => RespCommand.APPEND, (2 << 4) | 7 when lastWord == MemoryMarshal.Read("UBLISH\r\n"u8) && ptr[8] == 'P' => RespCommand.PUBLISH, @@ -756,6 +777,10 @@ private RespCommand FastParseArrayCommand(ref int count, ref ReadOnlySpan { return RespCommand.LSET; } + else if (*(ulong*)(ptr + 2) == MemoryMarshal.Read("\r\nLPOS\r\n"u8)) + { + return RespCommand.LPOS; + } break; case 'M': @@ -1233,6 +1258,10 @@ private RespCommand FastParseArrayCommand(ref int count, ref ReadOnlySpan { return RespCommand.BITFIELD; } + else if (*(ulong*)(ptr + 4) == MemoryMarshal.Read("EXPIREAT"u8) && *(ushort*)(ptr + 12) == MemoryMarshal.Read("\r\n"u8)) + { + return RespCommand.EXPIREAT; + } break; case 9: if (*(ulong*)(ptr + 4) == MemoryMarshal.Read("SUBSCRIB"u8) && *(uint*)(ptr + 11) == MemoryMarshal.Read("BE\r\n"u8)) @@ -1259,6 +1288,10 @@ private RespCommand FastParseArrayCommand(ref int count, ref ReadOnlySpan { return RespCommand.RPOPLPUSH; } + else if (*(ulong*)(ptr + 4) == MemoryMarshal.Read("PEXPIREA"u8) && *(uint*)(ptr + 11) == MemoryMarshal.Read("AT\r\n"u8)) + { + return RespCommand.PEXPIREAT; + } break; } @@ -1300,6 +1333,10 @@ private RespCommand FastParseArrayCommand(ref int count, ref ReadOnlySpan { return RespCommand.SDIFFSTORE; } + else if (*(ulong*)(ptr + 1) == MemoryMarshal.Read("10\r\nEXPI"u8) && *(uint*)(ptr + 9) == MemoryMarshal.Read("RETIME\r\n"u8)) + { + return RespCommand.EXPIRETIME; + } break; case 11: @@ -1327,6 +1364,10 @@ private RespCommand FastParseArrayCommand(ref int count, ref ReadOnlySpan { return RespCommand.SINTERSTORE; } + else if (*(ulong*)(ptr + 2) == 
MemoryMarshal.Read("1\r\nPEXPI"u8) && *(uint*)(ptr + 10) == MemoryMarshal.Read("RETIME\r\n"u8)) + { + return RespCommand.PEXPIRETIME; + } break; case 12: @@ -1467,7 +1508,41 @@ private RespCommand SlowParseCommand(ref int count, ref ReadOnlySpan speci } else if (command.SequenceEqual(CmdStrings.CLIENT)) { - return RespCommand.CLIENT; + if (count == 0) + { + specificErrorMsg = Encoding.ASCII.GetBytes(string.Format(CmdStrings.GenericErrWrongNumArgs, + nameof(RespCommand.CLIENT))); + } + else if (count >= 1) + { + Span subCommand = GetCommand(out bool gotSubCommand); + if (!gotSubCommand) + { + success = false; + return RespCommand.NONE; + } + + AsciiUtils.ToUpperInPlace(subCommand); + + count--; + + if (subCommand.SequenceEqual(CmdStrings.ID)) + { + return RespCommand.CLIENT_ID; + } + else if (subCommand.SequenceEqual(CmdStrings.INFO)) + { + return RespCommand.CLIENT_INFO; + } + else if (subCommand.SequenceEqual(CmdStrings.LIST)) + { + return RespCommand.CLIENT_LIST; + } + else if (subCommand.SequenceEqual(CmdStrings.KILL)) + { + return RespCommand.CLIENT_KILL; + } + } } else if (command.SequenceEqual(CmdStrings.AUTH)) { @@ -1499,10 +1574,16 @@ private RespCommand SlowParseCommand(ref int count, ref ReadOnlySpan speci { return RespCommand.COMMAND_COUNT; } - else if (subCommand.SequenceEqual(CmdStrings.INFO)) + + if (subCommand.SequenceEqual(CmdStrings.INFO)) { return RespCommand.COMMAND_INFO; } + + if (subCommand.SequenceEqual(CmdStrings.DOCS)) + { + return RespCommand.COMMAND_DOCS; + } } else if (command.SequenceEqual(CmdStrings.PING)) { @@ -1758,6 +1839,10 @@ private RespCommand SlowParseCommand(ref int count, ref ReadOnlySpan speci { return RespCommand.MIGRATE; } + else if (command.SequenceEqual(CmdStrings.PURGEBP)) + { + return RespCommand.PURGEBP; + } else if (command.SequenceEqual(CmdStrings.FAILOVER)) { return RespCommand.FAILOVER; diff --git a/libs/server/Resp/Parser/SessionParseState.cs b/libs/server/Resp/Parser/SessionParseState.cs index 
/// <summary>
/// Get enum argument at the given index.
/// Note: this method exists for compatibility with existing code.
/// For best performance use: ReadOnlySpan.EqualsUpperCaseSpanIgnoringCase("VALUE"u8) to figure out the current enum value.
/// </summary>
/// <typeparam name="T">The enum type to parse</typeparam>
/// <param name="i">Argument index (must be less than Count)</param>
/// <param name="ignoreCase">True to match member names case-insensitively</param>
/// <returns>The parsed enum value, or default(T) when the argument is a numeric string rather than a named member</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public T GetEnum<T>(int i, bool ignoreCase) where T : struct, Enum
{
    Debug.Assert(i < Count);
    var strRep = GetString(i);
    var value = Enum.Parse<T>(strRep, ignoreCase);
    // Extra check is to avoid numerical values being successfully parsed as enum value:
    // Enum.Parse accepts numeric strings like "3", so only accept the textual member name.
    // BUG FIX: the ternary was inverted ("? default : value") — a matching member name
    // must return the parsed value, and a non-matching (numeric) string must return default.
    return string.Equals(Enum.GetName(value), strRep,
        ignoreCase ? StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal) ? value : default;
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

using System;
using Garnet.common;
using Microsoft.Extensions.Logging;

namespace Garnet.server
{
    /// <summary>
    /// Buffer-pool owners that can be targeted by the PURGEBP command.
    /// </summary>
    public enum ManagerType : byte
    {
        /// <summary>
        /// MigrationManager Buffer Pool
        /// </summary>
        MigrationManager,

        /// <summary>
        /// ReplicationManager BufferPool
        /// </summary>
        ReplicationManager,

        /// <summary>
        /// ServerListener BufferPool
        /// </summary>
        ServerListener,
    }

    /// <summary>
    /// Extension methods for <see cref="ManagerType"/>.
    /// </summary>
    internal static class ManagerTypeExtensions
    {
        /// <summary>
        /// RESP simple-string payload announcing a completed purge for the given manager.
        /// </summary>
        public static ReadOnlySpan<byte> ToReadOnlySpan(this ManagerType managerType) => managerType switch
        {
            ManagerType.MigrationManager => "GC completed for MigrationManager"u8,
            ManagerType.ReplicationManager => "GC completed for ReplicationManager"u8,
            ManagerType.ServerListener => "GC completed for ServerListener"u8,
            _ => throw new GarnetException()
        };
    }

    internal sealed unsafe partial class RespServerSession : ServerSessionBase
    {
        /// <summary>
        /// Handles the PURGEBP command: purges the buffer pool of the requested
        /// manager and forces a full blocking GC, then reports success over RESP.
        /// </summary>
        /// <returns>True — the command was fully processed (possibly with an error reply)</returns>
        private bool NetworkPurgeBP()
        {
            // Expecting exactly 1 argument
            if (parseState.Count != 1)
            {
                return AbortWithWrongNumberOfArguments(nameof(RespCommand.PURGEBP));
            }

            // Reject anything that is not a named ManagerType member
            if (!parseState.TryGetEnum(0, ignoreCase: true, out ManagerType managerType) || !Enum.IsDefined(managerType))
            {
                while (!RespWriteUtils.WriteError(CmdStrings.RESP_SYNTAX_ERROR, ref dcurr, dend))
                    SendAndReset();
                return true;
            }

            // Cluster-backed pools require an active cluster session
            bool ClusterPurgeBufferPool(ManagerType mType)
            {
                if (clusterSession == null)
                {
                    while (!RespWriteUtils.WriteError(CmdStrings.RESP_ERR_GENERIC_CLUSTER_DISABLED, ref dcurr, dend))
                        SendAndReset();
                    return false;
                }
                storeWrapper.clusterProvider.PurgeBufferPool(mType);
                return true;
            }

            try
            {
                var purged = true;
                switch (managerType)
                {
                    case ManagerType.MigrationManager:
                    case ManagerType.ReplicationManager:
                        purged = ClusterPurgeBufferPool(managerType);
                        break;
                    case ManagerType.ServerListener:
                        storeWrapper.GetTcpServer().Purge();
                        break;
                    default:
                        purged = false;
                        while (!RespWriteUtils.WriteError($"ERR Could not purge {managerType}.", ref dcurr, dend))
                            SendAndReset();
                        break;
                }

                if (purged)
                {
                    // Full blocking collection so the released pool memory is actually returned
                    GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true);
                    while (!RespWriteUtils.WriteSimpleString(managerType.ToReadOnlySpan(), ref dcurr, dend))
                        SendAndReset();
                }
            }
            catch (Exception ex)
            {
                logger?.LogError(ex, "PURGEBP {type}:{managerType}", managerType, managerType.ToString());
                while (!RespWriteUtils.WriteError($"ERR {ex.Message}", ref dcurr, dend))
                    SendAndReset();
                return true;
            }

            return true;
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Garnet.common;

namespace Garnet.server
{
    /// <summary>
    /// A base class that represents a RESP command's argument
    /// </summary>
    public abstract class RespCommandArgumentBase : IRespSerializable
    {
        /// <summary>
        /// The argument's name
        /// </summary>
        public string Name { get; init; }

        /// <summary>
        /// The argument's type
        /// </summary>
        public RespCommandArgumentType Type { get; init; }

        /// <summary>
        /// The argument's display string
        /// </summary>
        public string DisplayText { get; init; }

        /// <summary>
        /// A constant literal that precedes the argument (user input) itself
        /// </summary>
        public string Token { get; init; }

        /// <summary>
        /// A short description of the argument
        /// </summary>
        public string Summary { get; init; }

        // Running count of key/value pairs emitted by ToRespFormat; subclasses
        // prepend "*{ArgCount}" once serialization is complete.
        protected int ArgCount { get; set; }

        /// <summary>
        /// Argument flags
        /// </summary>
        public RespCommandArgumentFlags ArgumentFlags
        {
            get => argFlags;
            init
            {
                argFlags = value;
                // Cache the RESP wire-names of the flags (from [Description] attributes)
                respFormatArgFlags = EnumUtils.GetEnumDescriptions(argFlags);
            }
        }

        /// <summary>
        /// Returns the serialized representation of the current object in RESP format
        /// This property returns a cached value, if exists (this value should never change after object initialization)
        /// </summary>
        [JsonIgnore]
        public string RespFormat => respFormat ??= ToRespFormat();

        private string respFormat;
        private readonly RespCommandArgumentFlags argFlags;
        private readonly string[] respFormatArgFlags;

        protected RespCommandArgumentBase(string name, string displayText, RespCommandArgumentType type,
            string token, string summary, RespCommandArgumentFlags flags) : this()
        {
            Name = name;
            DisplayText = displayText;
            Type = type;
            Token = token;
            Summary = summary;
            ArgumentFlags = flags;
        }

        /// <summary>
        /// Empty constructor for JSON deserialization
        /// </summary>
        protected RespCommandArgumentBase()
        {
            ArgCount = 0;
        }

        /// <inheritdoc />
        public virtual string ToRespFormat()
        {
            var builder = new StringBuilder();

            var key = "name";
            builder.Append($"${key.Length}\r\n{key}\r\n");
            builder.Append($"${this.Name.Length}\r\n{this.Name}\r\n");
            ArgCount += 2;

            key = "type";
            builder.Append($"${key.Length}\r\n{key}\r\n");
            var respType = EnumUtils.GetEnumDescriptions(this.Type)[0];
            builder.Append($"${respType.Length}\r\n{respType}\r\n");
            ArgCount += 2;

            if (this.DisplayText != null)
            {
                key = "display_text";
                builder.Append($"${key.Length}\r\n{key}\r\n");
                builder.Append($"${this.DisplayText.Length}\r\n{this.DisplayText}\r\n");
                ArgCount += 2;
            }

            if (this.Token != null)
            {
                key = "token";
                builder.Append($"${key.Length}\r\n{key}\r\n");
                builder.Append($"${this.Token.Length}\r\n{this.Token}\r\n");
                ArgCount += 2;
            }

            if (this.Summary != null)
            {
                key = "summary";
                builder.Append($"${key.Length}\r\n{key}\r\n");
                builder.Append($"${this.Summary.Length}\r\n{this.Summary}\r\n");
                ArgCount += 2;
            }

            if (this.ArgumentFlags != RespCommandArgumentFlags.None)
            {
                key = "flags";
                builder.Append($"${key.Length}\r\n{key}\r\n");
                builder.Append($"*{respFormatArgFlags.Length}\r\n");
                foreach (var respArgFlag in respFormatArgFlags)
                {
                    builder.Append($"+{respArgFlag}\r\n");
                }

                ArgCount += 2;
            }

            return builder.ToString();
        }
    }

    /// <summary>
    /// Represents a RESP command's argument of type key
    /// </summary>
    public sealed class RespCommandKeyArgument : RespCommandArgumentBase
    {
        /// <summary>
        /// The argument's value - a string that describes the value in the command's syntax
        /// </summary>
        public string Value { get; init; }

        /// <summary>
        /// This value is available for every argument of the key type.
        /// It is a 0-based index of the specification in the command's key specifications that corresponds to the argument.
        /// </summary>
        public int KeySpecIndex { get; init; }

        public RespCommandKeyArgument(string name, string displayText, string token,
            string summary, RespCommandArgumentFlags flags, string value, int keySpecIndex)
            : base(name, displayText, RespCommandArgumentType.Key, token, summary, flags)
        {
            Value = value;
            KeySpecIndex = keySpecIndex;
        }

        /// <summary>
        /// Empty constructor for JSON deserialization
        /// </summary>
        public RespCommandKeyArgument()
        {
        }

        /// <inheritdoc />
        public override string ToRespFormat()
        {
            // Base serialization first (it advances ArgCount), then append the
            // key-specific field and prepend the final element count.
            var baseRespFormat = base.ToRespFormat();
            var builder = new StringBuilder();
            builder.Append(baseRespFormat);
            var key = "key_spec_index";
            builder.Append($"${key.Length}\r\n{key}\r\n");
            builder.Append($":{KeySpecIndex}\r\n");
            ArgCount += 2;
            builder.Insert(0, $"*{ArgCount}\r\n");
            return builder.ToString();
        }
    }

    /// <summary>
    /// Represents a RESP command's argument of all types except OneOf and Block
    /// </summary>
    public abstract class RespCommandArgument : RespCommandArgumentBase
    {
        /// <summary>
        /// The argument's value - a string that describes the value in the command's syntax
        /// </summary>
        public string Value { get; init; }

        protected RespCommandArgument(string name, string displayText, RespCommandArgumentType type, string token,
            string summary, RespCommandArgumentFlags flags, string value)
            : base(name, displayText, type, token, summary, flags) => this.Value = value;

        /// <summary>
        /// Empty constructor for JSON deserialization
        /// </summary>
        protected RespCommandArgument()
        {
        }

        /// <inheritdoc />
        public override string ToRespFormat()
        {
            var baseRespFormat = base.ToRespFormat();
            if (Value == null) return baseRespFormat;

            var builder = new StringBuilder();
            builder.Append(baseRespFormat);
            var key = "value";
            builder.Append($"${key.Length}\r\n{key}\r\n");
            builder.Append($"${Value.Length}\r\n{Value}\r\n");
            ArgCount += 2;
            return builder.ToString();
        }
    }

    /// <summary>
    /// Represents a RESP command's argument of all types except OneOf and Block
    /// </summary>
    public sealed class RespCommandBasicArgument : RespCommandArgument
    {
        public RespCommandBasicArgument(string name, string displayText, RespCommandArgumentType type, string token,
            string summary, RespCommandArgumentFlags flags, string value)
            : base(name, displayText, type, token, summary, flags, value)
        {
        }

        /// <summary>
        /// Empty constructor for JSON deserialization
        /// </summary>
        public RespCommandBasicArgument()
        {
        }

        /// <inheritdoc />
        public override string ToRespFormat()
        {
            var baseRespFormat = base.ToRespFormat();
            var builder = new StringBuilder();
            builder.Append($"*{ArgCount}\r\n");
            builder.Append(baseRespFormat);
            return builder.ToString();
        }
    }

    /// <summary>
    /// Represents a RESP command's argument of type OneOf or Block
    /// </summary>
    public sealed class RespCommandContainerArgument : RespCommandArgumentBase
    {
        /// <summary>
        /// An array of nested arguments
        /// </summary>
        public RespCommandArgumentBase[] Arguments { get; init; }

        public RespCommandContainerArgument(string name, string displayText, RespCommandArgumentType type,
            string token, string summary, RespCommandArgumentFlags flags, RespCommandArgumentBase[] arguments)
            : base(name, displayText, type, token, summary, flags)
        {
            this.Arguments = arguments;
        }

        /// <summary>
        /// Empty constructor for JSON deserialization
        /// </summary>
        public RespCommandContainerArgument()
        {
        }

        /// <inheritdoc />
        public override string ToRespFormat()
        {
            var baseRespFormat = base.ToRespFormat();
            var builder = new StringBuilder();
            builder.Append(baseRespFormat);
            if (Arguments != null)
            {
                var key = "arguments";
                builder.Append($"${key.Length}\r\n{key}\r\n");
                builder.Append($"*{Arguments.Length}\r\n");
                foreach (var argument in Arguments)
                {
                    builder.Append(argument.RespFormat);
                }

                ArgCount += 2;
            }

            builder.Insert(0, $"*{ArgCount}\r\n");
            return builder.ToString();
        }
    }

    /// <summary>
    /// JSON converter for objects implementing RespCommandArgumentBase
    /// </summary>
    public class RespCommandArgumentConverter : JsonConverter<RespCommandArgumentBase>
    {
        /// <inheritdoc />
        public override bool CanConvert(Type typeToConvert) => typeof(RespCommandArgumentBase).IsAssignableFrom(typeToConvert);

        /// <inheritdoc />
        public override RespCommandArgumentBase Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
        {
            if (!typeof(RespCommandArgumentBase).IsAssignableFrom(typeToConvert)) return null;

            if (reader.TokenType != JsonTokenType.StartObject)
            {
                throw new JsonException();
            }

            // The first property must be the type discriminator so we know which
            // concrete argument class to construct at the end of the object.
            reader.Read();
            if (reader.TokenType != JsonTokenType.PropertyName)
            {
                throw new JsonException();
            }

            var propertyName = reader.GetString();
            if (propertyName != "TypeDiscriminator")
            {
                throw new JsonException();
            }

            reader.Read();
            if (reader.TokenType != JsonTokenType.String)
            {
                throw new JsonException();
            }

            var typeDiscriminator = reader.GetString();

            string name = null;
            string displayText = null;
            var type = RespCommandArgumentType.None;
            string token = null;
            string summary = null;
            var flags = RespCommandArgumentFlags.None;
            var keySpecIdx = -1;
            string strVal = null;
            RespCommandArgumentBase[] nestedArgs = null;

            while (reader.Read())
            {
                if (reader.TokenType == JsonTokenType.EndObject)
                {
                    return typeDiscriminator switch
                    {
                        nameof(RespCommandKeyArgument) => new RespCommandKeyArgument(name, displayText, token, summary, flags, strVal, keySpecIdx),
                        nameof(RespCommandContainerArgument) => new RespCommandContainerArgument(name, displayText, type, token, summary, flags, nestedArgs),
                        nameof(RespCommandBasicArgument) => new RespCommandBasicArgument(name, displayText, type, token, summary, flags, strVal),
                        _ => throw new JsonException()
                    };
                }

                if (reader.TokenType == JsonTokenType.PropertyName)
                {
                    propertyName = reader.GetString();
                    reader.Read();

                    switch (propertyName)
                    {
                        case nameof(RespCommandArgumentBase.Name):
                            name = reader.GetString();
                            break;
                        case nameof(RespCommandArgumentBase.DisplayText):
                            displayText = reader.GetString();
                            break;
                        case nameof(RespCommandArgumentBase.Type):
                            type = Enum.Parse<RespCommandArgumentType>(reader.GetString(), true);
                            break;
                        case nameof(RespCommandArgumentBase.Token):
                            token = reader.GetString();
                            break;
                        case nameof(RespCommandArgumentBase.Summary):
                            summary = reader.GetString();
                            break;
                        case nameof(RespCommandArgumentBase.ArgumentFlags):
                            flags = Enum.Parse<RespCommandArgumentFlags>(reader.GetString(), true);
                            break;
                        default:
                            // Remaining properties are specific to the concrete type
                            switch (typeDiscriminator)
                            {
                                case nameof(RespCommandKeyArgument):
                                    switch (propertyName)
                                    {
                                        case nameof(RespCommandKeyArgument.KeySpecIndex):
                                            keySpecIdx = reader.GetInt32();
                                            break;
                                        case nameof(RespCommandKeyArgument.Value):
                                            strVal = reader.GetString();
                                            break;
                                    }
                                    break;
                                case nameof(RespCommandBasicArgument):
                                    switch (propertyName)
                                    {
                                        case nameof(RespCommandBasicArgument.Value):
                                            strVal = reader.GetString();
                                            break;
                                    }
                                    break;
                                case nameof(RespCommandContainerArgument):
                                    switch (propertyName)
                                    {
                                        case nameof(RespCommandContainerArgument.Arguments):
                                            if (reader.TokenType == JsonTokenType.StartArray)
                                            {
                                                // Recursively deserialize nested arguments
                                                var args = new List<RespCommandArgumentBase>();

                                                while (reader.Read() && reader.TokenType != JsonTokenType.EndArray)
                                                {
                                                    var item = JsonSerializer.Deserialize<RespCommandArgumentBase>(ref reader, options);
                                                    args.Add(item);
                                                }

                                                nestedArgs = [.. args];
                                            }
                                            break;
                                    }
                                    break;
                            }
                            break;
                    }
                }
            }

            throw new JsonException();
        }

        /// <inheritdoc />
        public override void Write(Utf8JsonWriter writer, RespCommandArgumentBase cmdArg, JsonSerializerOptions options)
        {
            writer.WriteStartObject();

            writer.WriteString("TypeDiscriminator", cmdArg.GetType().Name);

            writer.WriteString(nameof(RespCommandArgumentBase.Name), cmdArg.Name);
            if (cmdArg.DisplayText != null)
                writer.WriteString(nameof(RespCommandArgumentBase.DisplayText), cmdArg.DisplayText);
            writer.WriteString(nameof(RespCommandArgumentBase.Type), cmdArg.Type.ToString());
            if (cmdArg.Token != null)
                writer.WriteString(nameof(RespCommandArgumentBase.Token), cmdArg.Token);
            if (cmdArg.Summary != null)
                writer.WriteString(nameof(RespCommandArgumentBase.Summary), cmdArg.Summary);
            if (cmdArg.ArgumentFlags != RespCommandArgumentFlags.None)
                writer.WriteString(nameof(RespCommandArgumentBase.ArgumentFlags), cmdArg.ArgumentFlags.ToString());

            switch (cmdArg)
            {
                case RespCommandKeyArgument keyArg:
                    writer.WriteNumber(nameof(RespCommandKeyArgument.KeySpecIndex), keyArg.KeySpecIndex);
                    if (keyArg.Value != null)
                        writer.WriteString(nameof(RespCommandKeyArgument.Value), keyArg.Value);
                    break;
                case RespCommandContainerArgument containerArg:
                    if (containerArg.Arguments != null)
                    {
                        writer.WritePropertyName(nameof(RespCommandContainerArgument.Arguments));
                        writer.WriteStartArray();
                        foreach (var arg in containerArg.Arguments)
                        {
                            JsonSerializer.Serialize(writer, arg, options);
                        }
                        writer.WriteEndArray();
                    }
                    break;
                case RespCommandBasicArgument respCmdArg:
                    if (respCmdArg.Value != null)
                    {
                        writer.WriteString(nameof(RespCommandKeyArgument.Value), respCmdArg.Value);
                    }
                    break;
                default:
                    throw new JsonException();
            }

            writer.WriteEndObject();
        }
    }

    /// <summary>
    /// An enum representing a RESP command argument's type
    /// </summary>
    public enum RespCommandArgumentType : byte
    {
        None,

        /// <summary>
        /// A string argument
        /// </summary>
        [Description("string")]
        String,

        /// <summary>
        /// An integer argument
        /// </summary>
        [Description("integer")]
        Integer,

        /// <summary>
        /// A double-precision argument
        /// </summary>
        [Description("double")]
        Double,

        /// <summary>
        /// A string that represents the name of a key
        /// </summary>
        [Description("key")]
        Key,

        /// <summary>
        /// A string that represents a glob-like pattern
        /// </summary>
        [Description("pattern")]
        Pattern,

        /// <summary>
        /// An integer that represents a Unix timestamp
        /// </summary>
        [Description("unix-time")]
        UnixTime,

        /// <summary>
        /// A token, meaning a reserved keyword, which may or may not be provided
        /// </summary>
        [Description("pure-token")]
        PureToken,

        /// <summary>
        /// A container for nested arguments. This type enables choice among several nested arguments.
        /// </summary>
        [Description("oneof")]
        OneOf,

        /// <summary>
        /// A container for nested arguments. This type enables grouping arguments and applying a property.
        /// </summary>
        [Description("block")]
        Block,
    }

    /// <summary>
    /// Argument flags
    /// </summary>
    [Flags]
    public enum RespCommandArgumentFlags : byte
    {
        None = 0,

        /// <summary>
        /// Denotes that the argument is optional
        /// </summary>
        [Description("optional")]
        Optional = 1,

        /// <summary>
        /// Denotes that the argument may be repeated
        /// </summary>
        [Description("multiple")]
        Multiple = 1 << 1,

        /// <summary>
        /// Denotes that the argument's token may be repeated together with the argument
        /// </summary>
        [Description("multiple-token")]
        MultipleToken = 1 << 2,
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.Loader;
using Garnet.common;
using Microsoft.Extensions.Logging;

namespace Garnet.server.Resp
{
    internal class RespCommandDataCommon
    {
        /// <summary>
        /// Path to Garnet.resources.dll, where command data is found
        /// </summary>
        private static readonly string ResourcesAssemblyPath = Path.Combine(AppContext.BaseDirectory, @"Garnet.resources.dll");

        /// <summary>
        /// Synchronize loading and unloading of resources assembly
        /// </summary>
        private static readonly object ResourcesAssemblyLock = new object();

        /// <summary>
        /// Safely imports commands data from embedded resource in dynamically loaded/unloaded assembly
        /// </summary>
        /// <typeparam name="TData">Type of IRespCommandData to import</typeparam>
        /// <param name="path">Path to embedded resource</param>
        /// <param name="commandsData">Imported data</param>
        /// <param name="logger">Logger</param>
        /// <returns>True if imported successfully</returns>
        internal static bool TryImportRespCommandsData<TData>(string path,
            out IReadOnlyDictionary<string, TData> commandsData, ILogger logger = null)
            where TData : class, IRespCommandData<TData>
        {
            lock (ResourcesAssemblyLock)
            {
                // Create a new unloadable assembly load context (isCollectible: true)
                // so the resources assembly does not stay resident after the import.
                var assemblyLoadContext = new AssemblyLoadContext(null, true);

                try
                {
                    // Load the assembly within the context and import the data
                    var assembly = assemblyLoadContext.LoadFromAssemblyPath(ResourcesAssemblyPath);

                    var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.EmbeddedResource, null, assembly);
                    var commandsDocsProvider = RespCommandsDataProviderFactory.GetRespCommandsDataProvider<TData>();

                    return commandsDocsProvider.TryImportRespCommandsData(path,
                        streamProvider, out commandsData, logger);
                }
                finally
                {
                    // Unload the context
                    assemblyLoadContext.Unload();

                    // Force GC to release the loaded assembly
                    GC.Collect();
                    GC.WaitForPendingFinalizers();
                }
            }
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Garnet.common;
using Microsoft.Extensions.Logging;
using JsonSerializer = System.Text.Json.JsonSerializer;

namespace Garnet.server
{
    /// <summary>
    /// An interface for different RESP command data (e.g. RespCommandInfo, RespCommandDocs)
    /// </summary>
    public interface IRespCommandData
    {
        /// <summary>
        /// Garnet's RespCommand enum command representation
        /// </summary>
        public RespCommand Command { get; init; }

        /// <summary>
        /// The command's name
        /// </summary>
        public string Name { get; init; }
    }

    /// <summary>
    /// An interface for different RESP command data (e.g. RespCommandInfo, RespCommandDocs)
    /// </summary>
    public interface IRespCommandData<TCommandData> : IRespCommandData where TCommandData : class, IRespCommandData<TCommandData>
    {
        /// <summary>
        /// All the command's sub-commands data, if any
        /// </summary>
        TCommandData[] SubCommands { get; }

        /// <summary>
        /// The parent data of the command
        /// </summary>
        TCommandData Parent { get; set; }
    }

    /// <summary>
    /// Interface for importing / exporting RESP commands data from different file types
    /// </summary>
    public interface IRespCommandsDataProvider<TData> where TData : IRespCommandData
    {
        /// <summary>
        /// Import RESP commands data from path using a stream provider
        /// </summary>
        /// <param name="path">Path to the file containing the serialized RESP commands data</param>
        /// <param name="streamProvider">Stream provider to use when reading from the path</param>
        /// <param name="commandsData">Outputs a read-only dictionary that maps a command name to its matching data</param>
        /// <param name="logger">Logger</param>
        /// <returns>True if import succeeded</returns>
        bool TryImportRespCommandsData(string path, IStreamProvider streamProvider,
            out IReadOnlyDictionary<string, TData> commandsData, ILogger logger = null);

        /// <summary>
        /// Export RESP commands data to path using a stream provider
        /// </summary>
        /// <param name="path">Path to the file to write into</param>
        /// <param name="streamProvider">Stream provider to use when writing to the path</param>
        /// <param name="commandsData">Dictionary that maps a command name to its matching data</param>
        /// <param name="logger">Logger</param>
        /// <returns>True if export succeeded</returns>
        bool TryExportRespCommandsData(string path, IStreamProvider streamProvider,
            IReadOnlyDictionary<string, TData> commandsData, ILogger logger = null);
    }

    public class RespCommandsDataProviderFactory
    {
        /// <summary>
        /// Get an IRespCommandsDataProvider instance based on its file type
        /// </summary>
        /// <param name="fileType">The RESP commands data file type</param>
        /// <returns>IRespCommandsDataProvider instance</returns>
        /// <exception cref="NotImplementedException"></exception>
        public static IRespCommandsDataProvider<TData> GetRespCommandsDataProvider<TData>(
            RespCommandsDataFileType fileType = RespCommandsDataFileType.Default)
            where TData : class, IRespCommandData<TData>
        {
            return fileType switch
            {
                RespCommandsDataFileType.Default => DefaultRespCommandsDataProvider<TData>.Instance,
                _ => throw new NotImplementedException($"No RespCommandsDataProvider exists for file type: {fileType}.")
            };
        }
    }

    /// <summary>
    /// Default commands data provider (JSON serialized array of data objects)
    /// </summary>
    internal class DefaultRespCommandsDataProvider<TData> : IRespCommandsDataProvider<TData>
        where TData : class, IRespCommandData<TData>
    {
        private static readonly Lazy<IRespCommandsDataProvider<TData>> LazyInstance;

        public static IRespCommandsDataProvider<TData> Instance => LazyInstance.Value;

        private static readonly JsonSerializerOptions SerializerOptions = new()
        {
            WriteIndented = true,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingDefault,
            Converters = { new JsonStringEnumConverter(), new KeySpecConverter(), new RespCommandArgumentConverter() }
        };

        static DefaultRespCommandsDataProvider()
        {
            LazyInstance = new(() => new DefaultRespCommandsDataProvider<TData>());
        }

        private DefaultRespCommandsDataProvider()
        {
        }

        /// <inheritdoc />
        public bool TryImportRespCommandsData(string path, IStreamProvider streamProvider,
            out IReadOnlyDictionary<string, TData> commandsData, ILogger logger = null)
        {
            using var stream = streamProvider.Read(path);
            using var streamReader = new StreamReader(stream);

            commandsData = default;

            try
            {
                var respJson = streamReader.ReadToEnd();
                var respCommandsData = JsonSerializer.Deserialize<TData[]>(respJson, SerializerOptions)!;

                var tmpRespCommandsData = new Dictionary<string, TData>(StringComparer.OrdinalIgnoreCase);
                foreach (var data in respCommandsData)
                {
                    tmpRespCommandsData.Add(data.Name, data);
                    // Wire up back-references from each sub-command to its parent
                    if (data.SubCommands != null)
                    {
                        foreach (var subCommand in data.SubCommands)
                        {
                            subCommand.Parent = data;
                        }
                    }
                }

                commandsData = new ReadOnlyDictionary<string, TData>(tmpRespCommandsData);
            }
            catch (JsonException je)
            {
                logger?.LogError(je, "An error occurred while parsing resp command data file (Path: {path}).", path);
                return false;
            }

            return true;
        }

        /// <inheritdoc />
        public bool TryExportRespCommandsData(string path, IStreamProvider streamProvider,
            IReadOnlyDictionary<string, TData> commandsData, ILogger logger = null)
        {
            string jsonSettings;

            // Stable ordering so exported files diff cleanly
            var dataToSerialize = commandsData.Values.OrderBy(ci => ci.Name).ToArray();
            try
            {
                jsonSettings = JsonSerializer.Serialize(dataToSerialize, SerializerOptions);
            }
            catch (NotSupportedException e)
            {
                logger?.LogError(e, "An error occurred while serializing resp commands data file (Path: {path}).", path);
                return false;
            }

            // NOTE(review): ASCII encoding drops non-ASCII characters in summaries —
            // presumably all command data is ASCII; confirm before adding localized text.
            var data = Encoding.ASCII.GetBytes(jsonSettings);
            streamProvider.Write(path, data);

            return true;
        }
    }

    /// <summary>
    /// Current supported RESP commands data file types
    /// </summary>
    public enum RespCommandsDataFileType
    {
        // Default file format (JSON serialized array of data objects)
        Default = 0,
    }
}
+ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel; +using System.Linq; +using System.Text; +using System.Text.Json.Serialization; +using Garnet.common; +using Garnet.server.Resp; +using Microsoft.Extensions.Logging; + +namespace Garnet.server +{ + /// + /// Represents a RESP command's docs + /// + public class RespCommandDocs : IRespSerializable, IRespCommandData + { + /// + public RespCommand Command { get; init; } + + /// + public string Name { get; init; } + + /// + /// Short command description + /// + public string Summary { get; init; } + + /// + /// The functional group to which the command belong + /// + public RespCommandGroup Group { get; init; } + + /// + /// A short explanation about the command's time complexity + /// + public string Complexity { get; init; } + + /// + /// Documentation flags + /// + public RespCommandDocFlags DocFlags + { + get => docFlags; + init + { + docFlags = value; + respFormatDocFlags = EnumUtils.GetEnumDescriptions(docFlags); + } + } + + /// + /// The alternative for a deprecated command + /// + public string ReplacedBy { get; init; } + + /// + /// The command's arguments + /// + public RespCommandDocs[] SubCommands { get; init; } + + /// + [JsonIgnore] + public RespCommandDocs Parent { get; set; } + + /// + /// The command's arguments + /// + public RespCommandArgumentBase[] Arguments { get; init; } + + /// + /// Returns the serialized representation of the current object in RESP format + /// This property returns a cached value, if exists (this value should never change after object initialization) + /// + [JsonIgnore] + public string RespFormat => respFormat ??= ToRespFormat(); + + private const string RespCommandsDocsEmbeddedFileName = @"RespCommandsDocs.json"; + + private string respFormat; + private readonly RespCommandDocFlags docFlags; + private readonly string[] respFormatDocFlags; + + private static bool IsInitialized = false; + private static 
readonly object IsInitializedLock = new(); + private static IReadOnlyDictionary AllRespCommandsDocs = null; + private static IReadOnlyDictionary AllRespSubCommandsDocs = null; + private static IReadOnlyDictionary ExternalRespCommandsDocs = null; + private static IReadOnlyDictionary ExternalRespSubCommandsDocs = null; + + public RespCommandDocs(RespCommand command, string name, string summary, RespCommandGroup group, string complexity, + RespCommandDocFlags docFlags, string replacedBy, RespCommandArgumentBase[] args, RespCommandDocs[] subCommands) : this() + { + Command = command; + Name = name; + Summary = summary; + Group = group; + Complexity = complexity; + DocFlags = docFlags; + ReplacedBy = replacedBy; + Arguments = args; + SubCommands = subCommands; + } + + /// + /// Empty constructor for JSON deserialization + /// + public RespCommandDocs() + { + + } + + private static bool TryInitialize(ILogger logger) + { + lock (IsInitializedLock) + { + if (IsInitialized) return true; + + IsInitialized = TryInitializeRespCommandsDocs(logger); + return IsInitialized; + } + } + + private static bool TryInitializeRespCommandsDocs(ILogger logger = null) + { + var importSucceeded = RespCommandDataCommon.TryImportRespCommandsData( + RespCommandsDocsEmbeddedFileName, + out var tmpAllRespCommandsDocs, logger); + + if (!importSucceeded) return false; + + if (!RespCommandsInfo.TryGetRespCommandNames(out var allExternalCommands, true, logger)) + return false; + + var tmpAllSubCommandsDocs = new Dictionary(StringComparer.OrdinalIgnoreCase); + var tmpExternalSubCommandsDocs = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var kvp in tmpAllRespCommandsDocs) + { + if (kvp.Value.SubCommands == null) continue; + + foreach (var sc in kvp.Value.SubCommands) + { + tmpAllSubCommandsDocs.Add(sc.Name, sc); + + // If parent command or sub-subcommand info mark the command as internal, + // don't add it to the external sub-command map + if 
(!RespCommandsInfo.TryGetRespCommandInfo(sc.Command, out var subCmdInfo) || + subCmdInfo.IsInternal || subCmdInfo.Parent.IsInternal) + continue; + + tmpExternalSubCommandsDocs.Add(sc.Name, sc); + } + } + + AllRespCommandsDocs = + new Dictionary(tmpAllRespCommandsDocs, StringComparer.OrdinalIgnoreCase); + AllRespSubCommandsDocs = new ReadOnlyDictionary(tmpAllSubCommandsDocs); + ExternalRespCommandsDocs = new ReadOnlyDictionary(tmpAllRespCommandsDocs + .Where(ci => allExternalCommands.Contains(ci.Key)) + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.OrdinalIgnoreCase)); + ExternalRespSubCommandsDocs = new ReadOnlyDictionary(tmpExternalSubCommandsDocs); + + return true; + } + + /// + /// Gets all the command docs objects of commands supported by Garnet + /// + /// Mapping between command name to command docs + /// Return only commands that are visible externally + /// Logger + /// True if initialization was successful and data was retrieved successfully + public static bool TryGetRespCommandsDocs(out IReadOnlyDictionary respCommandsDocs, bool externalOnly = false, ILogger logger = null) + { + respCommandsDocs = default; + if (!IsInitialized && !TryInitialize(logger)) return false; + + respCommandsDocs = externalOnly ? 
ExternalRespCommandsDocs : AllRespCommandsDocs; + return true; + } + + /// + /// Gets command docs by command name + /// + /// The command name + /// The command docs + /// Return command docs only if command is visible externally + /// Include sub-commands in command name search + /// Logger + /// True if initialization was successful and command docs was found + internal static bool TryGetRespCommandDocs(string cmdName, out RespCommandDocs respCommandsDocs, bool externalOnly = false, bool includeSubCommands = false, ILogger logger = null) + { + respCommandsDocs = default; + + return (TryGetRespCommandsDocs(out var cmdsDocs, externalOnly, logger) + && cmdsDocs.TryGetValue(cmdName, out respCommandsDocs)) + || (includeSubCommands && TryGetRespSubCommandsDocs(out var subCmdsDocs, externalOnly, logger) + && subCmdsDocs.TryGetValue(cmdName, out respCommandsDocs)); + } + + /// + /// Gets all the command docs of sub-commands supported by Garnet + /// + /// Mapping between sub-command name to command docs + /// Return only sub-commands that are visible externally + /// Logger + /// True if initialization was successful and data was retrieved successfully + public static bool TryGetRespSubCommandsDocs(out IReadOnlyDictionary respSubCommandsDocs, bool externalOnly = false, ILogger logger = null) + { + respSubCommandsDocs = default; + if (!IsInitialized && !TryInitialize(logger)) return false; + + respSubCommandsDocs = externalOnly ? 
ExternalRespSubCommandsDocs : AllRespSubCommandsDocs; + return true; + } + + /// + public string ToRespFormat() + { + var sb = new StringBuilder(); + var argCount = 0; + + string key; + + if (this.Summary != null) + { + key = "summary"; + sb.Append($"${key.Length}\r\n{key}\r\n"); + sb.Append($"${this.Summary.Length}\r\n{this.Summary}\r\n"); + argCount += 2; + } + + key = "group"; + sb.Append($"${key.Length}\r\n{key}\r\n"); + var respType = EnumUtils.GetEnumDescriptions(this.Group)[0]; + sb.Append($"${respType.Length}\r\n{respType}\r\n"); + argCount += 2; + + if (this.Complexity != null) + { + key = "complexity"; + sb.Append($"${key.Length}\r\n{key}\r\n"); + sb.Append($"${this.Complexity.Length}\r\n{this.Complexity}\r\n"); + argCount += 2; + } + + if (this.DocFlags != RespCommandDocFlags.None) + { + key = "doc_flags"; + sb.Append($"${key.Length}\r\n{key}\r\n"); + sb.Append($"*{respFormatDocFlags.Length}\r\n"); + foreach (var respDocFlag in respFormatDocFlags) + { + sb.Append($"+{respDocFlag.Length}\r\n"); + } + + argCount += 2; + } + + if (this.ReplacedBy != null) + { + key = "replaced_by"; + sb.Append($"${key.Length}\r\n{key}\r\n"); + sb.Append($"${this.ReplacedBy.Length}\r\n{this.ReplacedBy}\r\n"); + argCount += 2; + } + + if (Arguments != null) + { + key = "arguments"; + sb.Append($"${key.Length}\r\n{key}\r\n"); + sb.Append($"*{Arguments.Length}\r\n"); + foreach (var argument in Arguments) + { + sb.Append(argument.RespFormat); + } + + argCount += 2; + } + + if (SubCommands != null) + { + key = "subcommands"; + sb.Append($"${key.Length}\r\n{key}\r\n"); + sb.Append($"*{SubCommands.Length * 2}\r\n"); + foreach (var subCommand in SubCommands) + { + sb.Append(subCommand.RespFormat); + } + + argCount += 2; + } + + sb.Insert(0, $"${Name.Length}\r\n{Name}\r\n*{argCount}\r\n"); + return sb.ToString(); + } + } + + /// + /// Enum representing the functional group to which the command belongs + /// + public enum RespCommandGroup : byte + { + None, + [Description("bitmap")] + 
Bitmap, + [Description("cluster")] + Cluster, + [Description("connection")] + Connection, + [Description("generic")] + Generic, + [Description("geo")] + Geo, + [Description("hash")] + Hash, + [Description("hyperloglog")] + HyperLogLog, + [Description("list")] + List, + [Description("module")] + Module, + [Description("pubsub")] + PubSub, + [Description("scripting")] + Scripting, + [Description("sentinel")] + Sentinel, + [Description("server")] + Server, + [Description("set")] + Set, + [Description("sorted-set")] + SortedSet, + [Description("stream")] + Stream, + [Description("string")] + String, + [Description("transactions")] + Transactions, + } + + /// + /// Documentation flags + /// + [Flags] + public enum RespCommandDocFlags : byte + { + None = 0, + /// + /// The command is deprecated + /// + [Description("deprecated")] + Deprecated = 1, + + /// + /// A system command that isn't meant to be called by users + /// + [Description("syscmd")] + SysCmd = 1 << 1, + } +} \ No newline at end of file diff --git a/libs/server/Resp/RespCommandsInfo.cs b/libs/server/Resp/RespCommandsInfo.cs index 81f9455841..ea66f4bf26 100644 --- a/libs/server/Resp/RespCommandsInfo.cs +++ b/libs/server/Resp/RespCommandsInfo.cs @@ -7,11 +7,10 @@ using System.Collections.ObjectModel; using System.Linq; using System.Numerics; -using System.Reflection; using System.Text; using System.Text.Json.Serialization; using Garnet.common; -using Garnet.server.ACL; +using Garnet.server.Resp; using Microsoft.Extensions.Logging; namespace Garnet.server @@ -19,16 +18,12 @@ namespace Garnet.server /// /// Represents a RESP command's information /// - public class RespCommandsInfo : IRespSerializable + public class RespCommandsInfo : IRespSerializable, IRespCommandData { - /// - /// Garnet's RespCommand enum command representation - /// + /// public RespCommand Command { get; init; } - /// - /// The command's name - /// + /// public string Name { get; init; } /// @@ -94,9 +89,7 @@ public RespAclCategories 
AclCategories /// public RespCommandKeySpecification[] KeySpecifications { get; init; } - /// - /// All the command's sub-commands, if any - /// + /// public RespCommandsInfo[] SubCommands { get; init; } /// @@ -106,21 +99,21 @@ public RespAclCategories AclCategories [JsonIgnore] public string RespFormat => respFormat ??= ToRespFormat(); + /// [JsonIgnore] public RespCommandsInfo Parent { get; set; } - [JsonIgnore] - public RespCommand? SubCommand { get; set; } - - private const string RespCommandsEmbeddedFileName = @"RespCommandsInfo.json"; + private const string RespCommandsInfoEmbeddedFileName = @"RespCommandsInfo.json"; private string respFormat; private static bool IsInitialized = false; private static readonly object IsInitializedLock = new(); private static IReadOnlyDictionary AllRespCommandsInfo = null; + private static IReadOnlyDictionary AllRespSubCommandsInfo = null; private static IReadOnlyDictionary ExternalRespCommandsInfo = null; - private static IReadOnlyDictionary BasicRespCommandsInfo = null; + private static IReadOnlyDictionary ExternalRespSubCommandsInfo = null; + private static IReadOnlyDictionary FlattenedRespCommandsInfo = null; private static IReadOnlySet AllRespCommandNames = null; private static IReadOnlySet ExternalRespCommandNames = null; private static IReadOnlyDictionary> AclCommandInfo = null; @@ -146,55 +139,12 @@ private static bool TryInitialize(ILogger logger) private static bool TryInitializeRespCommandsInfo(ILogger logger = null) { - var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.EmbeddedResource, null, - Assembly.GetExecutingAssembly()); - var commandsInfoProvider = RespCommandsInfoProviderFactory.GetRespCommandsInfoProvider(); - - var importSucceeded = commandsInfoProvider.TryImportRespCommandsInfo(RespCommandsEmbeddedFileName, - streamProvider, out var scratchAllRespCommandsInfo, logger); + var importSucceeded = RespCommandDataCommon.TryImportRespCommandsData(RespCommandsInfoEmbeddedFileName, + 
out var tmpAllRespCommandsInfo, logger); if (!importSucceeded) return false; - // force sub commands into a well known order so we can quickly validate them against ACL lists - // setup parent refs so we can navigate from child -> parent - - // todo: remove all of this once sub command ids is dead - - var tmpAllRespCommandsInfo = - scratchAllRespCommandsInfo.ToDictionary( - static kv => kv.Key, - static kv => - { - if (kv.Value.SubCommands != null) - { - SetupSubCommands(kv.Value); - } - - return kv.Value; - - static void SetupSubCommands(RespCommandsInfo cmd) - { - foreach (var subCommand in cmd.SubCommands) - { - subCommand.Parent = cmd; - - if (!Enum.TryParse(subCommand.Name.Replace("|", "_").Replace("-", ""), out RespCommand parsed)) - { - throw new ACLException($"Couldn't map '{subCommand.Name}' to a member of {nameof(RespCommand)} this will break ACLs"); - } - - subCommand.SubCommand = parsed; - - if (subCommand.SubCommands != null) - { - SetupSubCommands(subCommand); - } - } - } - } - ); - - var tmpBasicRespCommandsInfo = new Dictionary(); + var tmpFlattenedRespCommandsInfo = new Dictionary(); foreach (var respCommandInfo in tmpAllRespCommandsInfo.Values) { if (respCommandInfo.Command == RespCommand.NONE) continue; @@ -203,25 +153,40 @@ static void SetupSubCommands(RespCommandsInfo cmd) // So let's prefer the SECONDARYOF or REPLICAOF alternatives if (respCommandInfo.Name == "SLAVEOF") continue; - tmpBasicRespCommandsInfo.Add(respCommandInfo.Command, respCommandInfo); + tmpFlattenedRespCommandsInfo.Add(respCommandInfo.Command, respCommandInfo); if (respCommandInfo.SubCommands != null) { foreach (var subRespCommandInfo in respCommandInfo.SubCommands) { - tmpBasicRespCommandsInfo.Add(subRespCommandInfo.SubCommand.Value, subRespCommandInfo); + tmpFlattenedRespCommandsInfo.Add(subRespCommandInfo.Command, subRespCommandInfo); } } } - AllRespCommandsInfo = - new Dictionary(tmpAllRespCommandsInfo, StringComparer.OrdinalIgnoreCase); + var tmpAllSubCommandsInfo = new 
Dictionary(StringComparer.OrdinalIgnoreCase); + var tmpExternalSubCommandsInfo = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var kvp in tmpAllRespCommandsInfo) + { + if (kvp.Value.SubCommands == null) continue; + + foreach (var sc in kvp.Value.SubCommands) + { + tmpAllSubCommandsInfo.Add(sc.Name, sc); + if (!kvp.Value.IsInternal && !sc.IsInternal) + tmpExternalSubCommandsInfo.Add(sc.Name, sc); + } + } + + AllRespCommandsInfo = tmpAllRespCommandsInfo; + AllRespSubCommandsInfo = new ReadOnlyDictionary(tmpAllSubCommandsInfo); ExternalRespCommandsInfo = new ReadOnlyDictionary(tmpAllRespCommandsInfo .Where(ci => !ci.Value.IsInternal) .ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.OrdinalIgnoreCase)); + ExternalRespSubCommandsInfo = new ReadOnlyDictionary(tmpExternalSubCommandsInfo); AllRespCommandNames = ImmutableHashSet.Create(StringComparer.OrdinalIgnoreCase, AllRespCommandsInfo.Keys.ToArray()); ExternalRespCommandNames = ImmutableHashSet.Create(StringComparer.OrdinalIgnoreCase, ExternalRespCommandsInfo.Keys.ToArray()); - BasicRespCommandsInfo = new ReadOnlyDictionary(tmpBasicRespCommandsInfo); + FlattenedRespCommandsInfo = new ReadOnlyDictionary(tmpFlattenedRespCommandsInfo); AclCommandInfo = new ReadOnlyDictionary>( @@ -238,7 +203,7 @@ static void SetupSubCommands(RespCommandsInfo cmd) FastBasicRespCommandsInfo = new RespCommandsInfo[(int)RespCommandExtensions.LastWriteCommand() - (int)RespCommandExtensions.FirstReadCommand()]; for (var i = (int)RespCommandExtensions.FirstReadCommand(); i < (int)RespCommandExtensions.LastWriteCommand(); i++) { - BasicRespCommandsInfo.TryGetValue((RespCommand)i, out var commandInfo); + FlattenedRespCommandsInfo.TryGetValue((RespCommand)i, out var commandInfo); FastBasicRespCommandsInfo[i - 1] = commandInfo; } @@ -327,16 +292,19 @@ public static bool TryGetRespCommandNames(out IReadOnlySet respCommandNa /// /// The command name /// The command info + /// Return command info only if command is visible 
externally + /// Include sub-commands in command name search /// Logger /// True if initialization was successful and command info was found - internal static bool TryGetRespCommandInfo(string cmdName, out RespCommandsInfo respCommandsInfo, ILogger logger = null) + internal static bool TryGetRespCommandInfo(string cmdName, out RespCommandsInfo respCommandsInfo, + bool externalOnly = false, bool includeSubCommands = false, ILogger logger = null) { respCommandsInfo = default; - if ((!IsInitialized && !TryInitialize(logger)) || - !AllRespCommandsInfo.ContainsKey(cmdName)) return false; - respCommandsInfo = AllRespCommandsInfo[cmdName]; - return true; + return ((TryGetRespCommandsInfo(out var cmdsInfo, externalOnly, logger) + && cmdsInfo.TryGetValue(cmdName, out respCommandsInfo)) || + ((includeSubCommands && TryGetRespSubCommandsInfo(out var subCmdsInfo, externalOnly, logger)) + && subCmdsInfo.TryGetValue(cmdName, out respCommandsInfo))); } /// @@ -354,8 +322,8 @@ public static bool TryGetRespCommandInfo(RespCommand cmd, if (!IsInitialized && !TryInitialize(logger)) return false; RespCommandsInfo tmpRespCommandInfo = default; - if (BasicRespCommandsInfo.ContainsKey(cmd)) - tmpRespCommandInfo = BasicRespCommandsInfo[cmd]; + if (FlattenedRespCommandsInfo.ContainsKey(cmd)) + tmpRespCommandInfo = FlattenedRespCommandsInfo[cmd]; if (tmpRespCommandInfo == default || (txnOnly && tmpRespCommandInfo.Flags.HasFlag(RespCommandFlags.NoMulti))) return false; @@ -378,12 +346,28 @@ public static bool TryFastGetRespCommandInfo(RespCommand cmd, out RespCommandsIn var offset = (int)cmd - 1; if (offset < 0 || offset >= FastBasicRespCommandsInfo.Length) - return true; + return false; respCommandsInfo = FastBasicRespCommandsInfo[offset]; return true; } + /// + /// Gets all the command info objects of sub-commands supported by Garnet + /// + /// Mapping between sub-command name to command info + /// Return only sub-commands that are visible externally + /// Logger + /// True if 
initialization was successful and data was retrieved successfully + public static bool TryGetRespSubCommandsInfo(out IReadOnlyDictionary respSubCommandsInfo, bool externalOnly = false, ILogger logger = null) + { + respSubCommandsInfo = default; + if (!IsInitialized && !TryInitialize(logger)) return false; + + respSubCommandsInfo = externalOnly ? ExternalRespSubCommandsInfo : AllRespSubCommandsInfo; + return true; + } + /// /// Serializes the current object to RESP format /// diff --git a/libs/server/Resp/RespCommandsInfoProvider.cs b/libs/server/Resp/RespCommandsInfoProvider.cs deleted file mode 100644 index 8040d9ba36..0000000000 --- a/libs/server/Resp/RespCommandsInfoProvider.cs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. - -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.IO; -using System.Linq; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using Garnet.common; -using Microsoft.Extensions.Logging; -using JsonSerializer = System.Text.Json.JsonSerializer; - -namespace Garnet.server -{ - /// - /// Interface for importing / exporting RESP commands info from different file types - /// - public interface IRespCommandsInfoProvider - { - /// - /// Import RESP commands info from path using a stream provider - /// - /// Path to the file containing the serialized RESP commands info - /// Stream provider to use when reading from the path - /// Logger - /// Outputs a read-only dictionary that maps a command name to its matching RespCommandsInfo - /// True if import succeeded - bool TryImportRespCommandsInfo(string path, IStreamProvider streamProvider, out IReadOnlyDictionary commandsInfo, ILogger logger = null); - - /// - /// Export RESP commands info to path using a stream provider - /// - /// Path to the file to write into - /// Stream provider to use when writing to the path - /// Dictionary that maps a command name 
to its matching RespCommandsInfo - /// Logger - /// True if export succeeded - bool TryExportRespCommandsInfo(string path, IStreamProvider streamProvider, IReadOnlyDictionary commandsInfo, ILogger logger = null); - } - - public class RespCommandsInfoProviderFactory - { - /// - /// Get an IRespCommandsInfoProvider instance based on its file type - /// - /// The RESP commands info file type - /// IRespCommandsInfoProvider instance - /// - public static IRespCommandsInfoProvider GetRespCommandsInfoProvider(RespCommandsObjectFileType fileType = RespCommandsObjectFileType.Default) - { - switch (fileType) - { - case RespCommandsObjectFileType.Default: - return DefaultRespCommandsInfoProvider.Instance; - default: - throw new NotImplementedException($"No RespCommandsInfoProvider exists for file type: {fileType}."); - } - } - } - - /// - /// Default commands info provider (JSON serialized array of RespCommandsInfo objects) - /// - internal class DefaultRespCommandsInfoProvider : IRespCommandsInfoProvider - { - private static readonly Lazy LazyInstance; - - public static IRespCommandsInfoProvider Instance => LazyInstance.Value; - - private static readonly JsonSerializerOptions SerializerOptions = new() - { - WriteIndented = true, - Converters = { new JsonStringEnumConverter(), new KeySpecConverter() } - }; - - static DefaultRespCommandsInfoProvider() - { - LazyInstance = new(() => new DefaultRespCommandsInfoProvider()); - } - - private DefaultRespCommandsInfoProvider() - { - } - - public bool TryImportRespCommandsInfo(string path, IStreamProvider streamProvider, out IReadOnlyDictionary commandsInfo, ILogger logger = null) - { - using var stream = streamProvider.Read(path); - using var streamReader = new StreamReader(stream); - - commandsInfo = default; - - try - { - string respJson = streamReader.ReadToEnd(); - var respCommands = JsonSerializer.Deserialize(respJson, SerializerOptions)!; - - var tmpRespCommandsInfo = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach 
(var respCommandsInfo in respCommands) - { - tmpRespCommandsInfo.Add(respCommandsInfo.Name, respCommandsInfo); - } - - commandsInfo = new ReadOnlyDictionary(tmpRespCommandsInfo); - } - catch (JsonException je) - { - logger?.LogError(je, "An error occurred while parsing resp commands info file (Path: {path}).", path); - return false; - } - - return true; - } - - public bool TryExportRespCommandsInfo(string path, IStreamProvider streamProvider, IReadOnlyDictionary commandsInfo, ILogger logger = null) - { - string jsonSettings; - - var commandsInfoToSerialize = commandsInfo.Values.OrderBy(ci => ci.Name).ToArray(); - try - { - jsonSettings = JsonSerializer.Serialize(commandsInfoToSerialize, SerializerOptions); - } - catch (NotSupportedException e) - { - logger?.LogError(e, "An error occurred while serializing resp commands info file (Path: {path}).", path); - return false; - } - - var data = Encoding.ASCII.GetBytes(jsonSettings); - streamProvider.Write(path, data); - - return true; - } - } - - /// - /// Current supported RESP commands info file types - /// - public enum RespCommandsObjectFileType - { - // Default file format (JSON serialized array of RespCommandsInfo objects) - Default = 0, - } -} \ No newline at end of file diff --git a/libs/server/Resp/RespServerSession.cs b/libs/server/Resp/RespServerSession.cs index d994f1a7bd..028255b1a6 100644 --- a/libs/server/Resp/RespServerSession.cs +++ b/libs/server/Resp/RespServerSession.cs @@ -6,7 +6,6 @@ using System.Diagnostics; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; -using System.Security.Cryptography; using System.Text; using Garnet.common; using Garnet.common.Parsing; @@ -93,7 +92,6 @@ internal sealed unsafe partial class RespServerSession : ServerSessionBase public readonly StorageSession storageSession; internal BasicGarnetApi basicGarnetApi; internal LockableGarnetApi lockableGarnetApi; - internal CollectionItemBroker itemBroker; readonly IGarnetAuthenticator _authenticator; @@ 
-109,8 +107,15 @@ internal sealed unsafe partial class RespServerSession : ServerSessionBase /// public byte SessionAsking { get; set; } - // Track whether the incoming network batch had some admin command - bool hasAdminCommand; + /// + /// If set, commands can use this to enumerate details about the server or other sessions. + /// + /// It is not guaranteed to be set. + /// + public IGarnetServer Server { get; set; } + + // Track whether the incoming network batch contains slow commands that should not be counter in NET_RS histogram + bool containsSlowCommand; readonly CustomCommandManagerSession customCommandManagerSession; @@ -160,11 +165,21 @@ internal sealed unsafe partial class RespServerSession : ServerSessionBase /// internal readonly SessionScriptCache sessionScriptCache; + /// + /// Identifier for session - used for CLIENT and related commands. + /// + public long Id { get; } + + /// + /// when this was created. + /// + public long CreationTicks { get; } + public RespServerSession( + long id, INetworkSender networkSender, StoreWrapper storeWrapper, SubscribeBroker> subscribeBroker, - CollectionItemBroker itemBroker, IGarnetAuthenticator authenticator, bool enableScripts) : base(networkSender) @@ -172,22 +187,24 @@ public RespServerSession( this.customCommandManagerSession = new CustomCommandManagerSession(storeWrapper.customCommandManager); this.sessionMetrics = storeWrapper.serverOptions.MetricsSamplingFrequency > 0 ? new GarnetSessionMetrics() : null; this.LatencyMetrics = storeWrapper.serverOptions.LatencyMonitor ? new GarnetLatencyMetricsSession(storeWrapper.monitor) : null; - logger = storeWrapper.sessionLogger != null ? new SessionLogger(storeWrapper.sessionLogger, $"[{storeWrapper.localEndpoint}] [{networkSender?.RemoteEndpointName}] [{GetHashCode():X8}] ") : null; + logger = storeWrapper.sessionLogger != null ? 
new SessionLogger(storeWrapper.sessionLogger, $"[{networkSender?.RemoteEndpointName}] [{GetHashCode():X8}] ") : null; + + this.Id = id; + this.CreationTicks = Environment.TickCount64; - logger?.LogDebug("Starting RespServerSession"); + logger?.LogDebug("Starting RespServerSession Id={0}", this.Id); // Initialize session-local scratch buffer of size 64 bytes, used for constructing arguments in GarnetApi this.scratchBufferManager = new ScratchBufferManager(); // Create storage session and API - this.storageSession = new StorageSession(storeWrapper, scratchBufferManager, sessionMetrics, LatencyMetrics, itemBroker, logger); + this.storageSession = new StorageSession(storeWrapper, scratchBufferManager, sessionMetrics, LatencyMetrics, logger); this.basicGarnetApi = new BasicGarnetApi(storageSession, storageSession.basicContext, storageSession.objectStoreBasicContext); this.lockableGarnetApi = new LockableGarnetApi(storageSession, storageSession.lockableContext, storageSession.objectStoreLockableContext); this.storeWrapper = storeWrapper; this.subscribeBroker = subscribeBroker; - this.itemBroker = itemBroker; this._authenticator = authenticator ?? storeWrapper.serverOptions.AuthSettings?.CreateAuthenticator(this.storeWrapper) ?? 
new GarnetNoAuthAuthenticator(); if (storeWrapper.serverOptions.EnableLua && enableScripts) @@ -224,7 +241,7 @@ internal void SetUser(User user) public override void Dispose() { - logger?.LogDebug("Disposing RespServerSession"); + logger?.LogDebug("Disposing RespServerSession Id={0}", this.Id); if (recvBufferPtr != null) { @@ -235,7 +252,7 @@ public override void Dispose() storeWrapper.monitor.AddMetricsHistorySessionDispose(sessionMetrics, latencyMetrics); subscribeBroker?.RemoveSubscription(this); - itemBroker?.HandleSessionDisposed(this); + storeWrapper.itemBroker?.HandleSessionDisposed(this); sessionScriptCache?.Dispose(); // Cancel the async processor, if any @@ -316,8 +333,11 @@ public override int TryConsumeMessages(byte* reqBuffer, int bytesReceived) logger?.Log(ex.LogLevel, ex, "ProcessMessages threw a GarnetException:"); // Forward Garnet error as RESP error - while (!RespWriteUtils.WriteError($"ERR Garnet Exception: {ex.Message}", ref dcurr, dend)) - SendAndReset(); + if (ex.ClientResponse) + { + while (!RespWriteUtils.WriteError($"ERR Garnet Exception: {ex.Message}", ref dcurr, dend)) + SendAndReset(); + } // Send message and dispose the network sender to end the session if (dcurr > networkSender.GetResponseObjectHead()) @@ -337,6 +357,7 @@ public override int TryConsumeMessages(byte* reqBuffer, int bytesReceived) { networkSender.ExitAndReturnResponseObject(); clusterSession?.ReleaseCurrentEpoch(); + scratchBufferManager.Reset(); } if (txnManager.IsSkippingOperations()) @@ -347,10 +368,10 @@ public override int TryConsumeMessages(byte* reqBuffer, int bytesReceived) { if (latencyMetrics != null) { - if (hasAdminCommand) + if (containsSlowCommand) { latencyMetrics.StopAndSwitch(LatencyMetricsType.NET_RS_LAT, LatencyMetricsType.NET_RS_LAT_ADMIN); - hasAdminCommand = false; + containsSlowCommand = false; } else latencyMetrics.Stop(LatencyMetricsType.NET_RS_LAT); @@ -411,7 +432,7 @@ private void ProcessMessages() } else { - if (CanServeSlot(cmd)) + if 
(clusterSession == null || CanServeSlot(cmd)) _ = ProcessBasicCommands(cmd, ref basicGarnetApi); } } @@ -421,6 +442,10 @@ private void ProcessMessages() SendAndReset(); } } + else + { + containsSlowCommand = true; + } // Advance read head variables to process the next command _origReadHead = readHead = endReadHead; @@ -474,6 +499,10 @@ private bool MakeUpperCase(byte* ptr) private bool ProcessBasicCommands(RespCommand cmd, ref TGarnetApi storageApi) where TGarnetApi : IGarnetApi { + /* + * WARNING: Do not add any command here classified as @slow! + * Only @fast commands otherwise latency tracking will break for NET_RS (check how containsSlowCommand is used). + */ _ = cmd switch { RespCommand.GET => NetworkGET(ref storageApi), @@ -483,9 +512,12 @@ private bool ProcessBasicCommands(RespCommand cmd, ref TGarnetApi st RespCommand.SETEXNX => NetworkSETEXNX(ref storageApi), RespCommand.DEL => NetworkDEL(ref storageApi), RespCommand.RENAME => NetworkRENAME(ref storageApi), + RespCommand.RENAMENX => NetworkRENAMENX(ref storageApi), RespCommand.EXISTS => NetworkEXISTS(ref storageApi), RespCommand.EXPIRE => NetworkEXPIRE(RespCommand.EXPIRE, ref storageApi), RespCommand.PEXPIRE => NetworkEXPIRE(RespCommand.PEXPIRE, ref storageApi), + RespCommand.EXPIRETIME => NetworkEXPIRETIME(RespCommand.EXPIRETIME, ref storageApi), + RespCommand.PEXPIRETIME => NetworkEXPIRETIME(RespCommand.PEXPIRETIME, ref storageApi), RespCommand.PERSIST => NetworkPERSIST(ref storageApi), RespCommand.GETRANGE => NetworkGetRange(ref storageApi), RespCommand.TTL => NetworkTTL(RespCommand.TTL, ref storageApi), @@ -493,6 +525,7 @@ private bool ProcessBasicCommands(RespCommand cmd, ref TGarnetApi st RespCommand.SETRANGE => NetworkSetRange(ref storageApi), RespCommand.GETDEL => NetworkGETDEL(ref storageApi), RespCommand.APPEND => NetworkAppend(ref storageApi), + RespCommand.STRLEN => NetworkSTRLEN(ref storageApi), RespCommand.INCR => NetworkIncrement(RespCommand.INCR, ref storageApi), RespCommand.INCRBY => 
NetworkIncrement(RespCommand.INCRBY, ref storageApi), RespCommand.DECR => NetworkIncrement(RespCommand.DECR, ref storageApi), @@ -502,7 +535,7 @@ private bool ProcessBasicCommands(RespCommand cmd, ref TGarnetApi st RespCommand.BITCOUNT => NetworkStringBitCount(ref storageApi), RespCommand.BITPOS => NetworkStringBitPosition(ref storageApi), RespCommand.PUBLISH => NetworkPUBLISH(), - RespCommand.PING => parseState.Count == 0 ? NetworkPING() : ProcessArrayCommands(cmd, ref storageApi), + RespCommand.PING => parseState.Count == 0 ? NetworkPING() : NetworkArrayPING(), RespCommand.ASKING => NetworkASKING(), RespCommand.MULTI => NetworkMULTI(), RespCommand.EXEC => NetworkEXEC(), @@ -512,21 +545,8 @@ private bool ProcessBasicCommands(RespCommand cmd, ref TGarnetApi st RespCommand.RUNTXP => NetworkRUNTXP(), RespCommand.READONLY => NetworkREADONLY(), RespCommand.READWRITE => NetworkREADWRITE(), - RespCommand.COMMAND => NetworkCOMMAND(), - RespCommand.COMMAND_COUNT => NetworkCOMMAND_COUNT(), - RespCommand.COMMAND_INFO => NetworkCOMMAND_INFO(), - RespCommand.ECHO => NetworkECHO(), - RespCommand.INFO => NetworkINFO(), - RespCommand.HELLO => NetworkHELLO(), - RespCommand.TIME => NetworkTIME(), - RespCommand.FLUSHALL => NetworkFLUSHALL(), - RespCommand.FLUSHDB => NetworkFLUSHDB(), - RespCommand.AUTH => NetworkAUTH(), - RespCommand.MEMORY_USAGE => NetworkMemoryUsage(ref storageApi), - RespCommand.ACL_CAT => NetworkAclCat(), - RespCommand.ACL_WHOAMI => NetworkAclWhoAmI(), - RespCommand.ASYNC => NetworkASYNC(), - RespCommand.MIGRATE => NetworkProcessClusterCommand(cmd), + RespCommand.EXPIREAT => NetworkEXPIREAT(RespCommand.EXPIREAT, ref storageApi), + RespCommand.PEXPIREAT => NetworkEXPIREAT(RespCommand.PEXPIREAT, ref storageApi), _ => ProcessArrayCommands(cmd, ref storageApi) }; @@ -537,6 +557,10 @@ private bool ProcessBasicCommands(RespCommand cmd, ref TGarnetApi st private bool ProcessArrayCommands(RespCommand cmd, ref TGarnetApi storageApi) where TGarnetApi : IGarnetApi { + /* + 
* WARNING: Do not add any command here classified as @slow! + * Only @fast commands otherwise latency tracking will break for NET_RS (check how containsSlowCommand is used). + */ var success = cmd switch { RespCommand.MGET => NetworkMGET(ref storageApi), @@ -547,13 +571,6 @@ private bool ProcessArrayCommands(RespCommand cmd, ref TGarnetApi st RespCommand.WATCH => NetworkWATCH(), RespCommand.WATCH_MS => NetworkWATCH_MS(), RespCommand.WATCH_OS => NetworkWATCH_OS(), - RespCommand.STRLEN => NetworkSTRLEN(ref storageApi), - RespCommand.PING => NetworkArrayPING(), - //General key commands - RespCommand.DBSIZE => NetworkDBSIZE(ref storageApi), - RespCommand.KEYS => NetworkKEYS(ref storageApi), - RespCommand.SCAN => NetworkSCAN(ref storageApi), - RespCommand.TYPE => NetworkTYPE(ref storageApi), // Pub/sub commands RespCommand.SUBSCRIBE => NetworkSUBSCRIBE(), RespCommand.PSUBSCRIBE => NetworkPSUBSCRIBE(), @@ -605,6 +622,7 @@ private bool ProcessArrayCommands(RespCommand cmd, ref TGarnetApi st RespCommand.LPUSH => ListPush(cmd, ref storageApi), RespCommand.LPUSHX => ListPush(cmd, ref storageApi), RespCommand.LPOP => ListPop(cmd, ref storageApi), + RespCommand.LPOS => ListPosition(ref storageApi), RespCommand.RPUSH => ListPush(cmd, ref storageApi), RespCommand.RPUSHX => ListPush(cmd, ref storageApi), RespCommand.RPOP => ListPop(cmd, ref storageApi), @@ -654,10 +672,6 @@ private bool ProcessArrayCommands(RespCommand cmd, ref TGarnetApi st RespCommand.SUNIONSTORE => SetUnionStore(ref storageApi), RespCommand.SDIFF => SetDiff(ref storageApi), RespCommand.SDIFFSTORE => SetDiffStore(ref storageApi), - // Script Commands - RespCommand.SCRIPT => TrySCRIPT(), - RespCommand.EVAL => TryEVAL(), - RespCommand.EVALSHA => TryEVALSHA(), _ => ProcessOtherCommands(cmd, ref storageApi) }; return success; @@ -666,21 +680,62 @@ private bool ProcessArrayCommands(RespCommand cmd, ref TGarnetApi st private bool ProcessOtherCommands(RespCommand command, ref TGarnetApi storageApi) where TGarnetApi : 
IGarnetApi { - if (command == RespCommand.CLIENT) + /* + * WARNING: Here is safe to add @slow commands (check how containsSlowCommand is used). + */ + containsSlowCommand = true; + var success = command switch { - while (!RespWriteUtils.WriteDirect(CmdStrings.RESP_OK, ref dcurr, dend)) - SendAndReset(); - } - else if (command == RespCommand.SUBSCRIBE) + RespCommand.AUTH => NetworkAUTH(), + RespCommand.MEMORY_USAGE => NetworkMemoryUsage(ref storageApi), + RespCommand.CLIENT_ID => NetworkCLIENTID(), + RespCommand.CLIENT_INFO => NetworkCLIENTINFO(), + RespCommand.CLIENT_LIST => NetworkCLIENTLIST(), + RespCommand.CLIENT_KILL => NetworkCLIENTKILL(), + RespCommand.COMMAND => NetworkCOMMAND(), + RespCommand.COMMAND_COUNT => NetworkCOMMAND_COUNT(), + RespCommand.COMMAND_DOCS => NetworkCOMMAND_DOCS(), + RespCommand.COMMAND_INFO => NetworkCOMMAND_INFO(), + RespCommand.ECHO => NetworkECHO(), + RespCommand.HELLO => NetworkHELLO(), + RespCommand.TIME => NetworkTIME(), + RespCommand.FLUSHALL => NetworkFLUSHALL(), + RespCommand.FLUSHDB => NetworkFLUSHDB(), + RespCommand.ACL_CAT => NetworkAclCat(), + RespCommand.ACL_WHOAMI => NetworkAclWhoAmI(), + RespCommand.ASYNC => NetworkASYNC(), + RespCommand.RUNTXP => NetworkRUNTXP(), + RespCommand.INFO => NetworkINFO(), + RespCommand.CustomTxn => NetworkCustomTxn(), + RespCommand.CustomRawStringCmd => NetworkCustomRawStringCmd(ref storageApi), + RespCommand.CustomObjCmd => NetworkCustomObjCmd(ref storageApi), + RespCommand.CustomProcedure => NetworkCustomProcedure(), + //General key commands + RespCommand.DBSIZE => NetworkDBSIZE(ref storageApi), + RespCommand.KEYS => NetworkKEYS(ref storageApi), + RespCommand.SCAN => NetworkSCAN(ref storageApi), + RespCommand.TYPE => NetworkTYPE(ref storageApi), + // Script Commands + RespCommand.SCRIPT => TrySCRIPT(), + RespCommand.EVAL => TryEVAL(), + RespCommand.EVALSHA => TryEVALSHA(), + _ => Process(command) + }; + + bool NetworkCLIENTID() { - while (!RespWriteUtils.WriteInteger(1, ref dcurr, dend)) + 
if (parseState.Count != 0) + { + return AbortWithWrongNumberOfArguments("client|id"); + } + + while (!RespWriteUtils.WriteInteger(Id, ref dcurr, dend)) SendAndReset(); + + return true; } - else if (command == RespCommand.RUNTXP) - { - return NetworkRUNTXP(); - } - else if (command == RespCommand.CustomTxn) + + bool NetworkCustomTxn() { if (!IsCommandArityValid(currentCustomTransaction.NameStr, parseState.Count)) { @@ -691,32 +746,10 @@ private bool ProcessOtherCommands(RespCommand command, ref TGarnetAp // Perform the operation TryTransactionProc(currentCustomTransaction.id, recvBufferPtr + readHead, recvBufferPtr + endReadHead, customCommandManagerSession.GetCustomTransactionProcedure(currentCustomTransaction.id, txnManager, scratchBufferManager).Item1); currentCustomTransaction = null; + return true; } - else if (command == RespCommand.CustomRawStringCmd) - { - if (!IsCommandArityValid(currentCustomRawStringCommand.NameStr, parseState.Count)) - { - currentCustomRawStringCommand = null; - return true; - } - - // Perform the operation - TryCustomRawStringCommand(recvBufferPtr + readHead, recvBufferPtr + endReadHead, currentCustomRawStringCommand.GetRespCommand(), currentCustomRawStringCommand.expirationTicks, currentCustomRawStringCommand.type, ref storageApi); - currentCustomRawStringCommand = null; - } - else if (command == RespCommand.CustomObjCmd) - { - if (!IsCommandArityValid(currentCustomObjectCommand.NameStr, parseState.Count)) - { - currentCustomObjectCommand = null; - return true; - } - // Perform the operation - TryCustomObjectCommand(recvBufferPtr + readHead, recvBufferPtr + endReadHead, currentCustomObjectCommand.GetRespCommand(), currentCustomObjectCommand.subid, currentCustomObjectCommand.type, ref storageApi); - currentCustomObjectCommand = null; - } - else if (command == RespCommand.CustomProcedure) + bool NetworkCustomProcedure() { if (!IsCommandArityValid(currentCustomProcedure.NameStr, parseState.Count)) { @@ -728,12 +761,46 @@ private bool 
ProcessOtherCommands(RespCommand command, ref TGarnetAp currentCustomProcedure.CustomProcedureImpl); currentCustomProcedure = null; + return true; } - else + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + bool Process(RespCommand command) { ProcessAdminCommands(command); return true; } + + return success; + } + + private bool NetworkCustomRawStringCmd(ref TGarnetApi storageApi) + where TGarnetApi : IGarnetApi + { + if (!IsCommandArityValid(currentCustomRawStringCommand.NameStr, parseState.Count)) + { + currentCustomRawStringCommand = null; + return true; + } + + // Perform the operation + TryCustomRawStringCommand(recvBufferPtr + readHead, recvBufferPtr + endReadHead, currentCustomRawStringCommand.GetRespCommand(), currentCustomRawStringCommand.expirationTicks, currentCustomRawStringCommand.type, ref storageApi); + currentCustomRawStringCommand = null; + return true; + } + + bool NetworkCustomObjCmd(ref TGarnetApi storageApi) + where TGarnetApi : IGarnetApi + { + if (!IsCommandArityValid(currentCustomObjectCommand.NameStr, parseState.Count)) + { + currentCustomObjectCommand = null; + return true; + } + + // Perform the operation + TryCustomObjectCommand(recvBufferPtr + readHead, recvBufferPtr + endReadHead, currentCustomObjectCommand.GetRespCommand(), currentCustomObjectCommand.subid, currentCustomObjectCommand.type, ref storageApi); + currentCustomObjectCommand = null; return true; } @@ -826,6 +893,16 @@ public ArgSlice GetCommandAsArgSlice(out bool success) return result; } + /// + /// Attempt to kill this session. + /// + /// Returns true if this call actually kills the underlying network connection. + /// + /// Subsequent calls will return false. 
+ /// + public bool TryKill() + => networkSender.TryClose(); + [MethodImpl(MethodImplOptions.AggressiveInlining)] private unsafe bool Write(ref Status s, ref byte* dst, int length) { diff --git a/libs/server/Resp/RespServerSessionSlotVerify.cs b/libs/server/Resp/RespServerSessionSlotVerify.cs index 9497329c98..44b7f60d43 100644 --- a/libs/server/Resp/RespServerSessionSlotVerify.cs +++ b/libs/server/Resp/RespServerSessionSlotVerify.cs @@ -2,6 +2,7 @@ // Licensed under the MIT license. using System; +using System.Diagnostics; using Garnet.common; namespace Garnet.server @@ -33,9 +34,7 @@ bool NetworkKeyArraySlotVerify(Span keys, bool readOnly, int count = - bool CanServeSlot(RespCommand cmd) { - // If cluster is disable all commands - if (clusterSession == null) - return true; + Debug.Assert(clusterSession != null); // Verify slot for command if it falls into data command category if (!cmd.IsDataCommand()) diff --git a/libs/server/Servers/GarnetServerBase.cs b/libs/server/Servers/GarnetServerBase.cs index 7c6a229a0f..5911207b83 100644 --- a/libs/server/Servers/GarnetServerBase.cs +++ b/libs/server/Servers/GarnetServerBase.cs @@ -116,7 +116,7 @@ public virtual IEnumerable ActiveConsumers() /// public GarnetServerBase(string address, int port, int networkBufferSize, ILogger logger = null) { - this.logger = logger == null ? null : new SessionLogger(logger, $"[{address ?? StoreWrapper.GetIp()}:{port}] "); + this.logger = logger; this.address = address; this.port = port; this.networkBufferSize = networkBufferSize; @@ -148,6 +148,14 @@ public void Unregister(WireFormat wireFormat, out ISessionProvider provider) public bool AddSession(WireFormat protocol, ref ISessionProvider provider, INetworkSender networkSender, out IMessageConsumer session) { session = provider.GetSession(protocol, networkSender); + + // RESP sessions need to be able to enumerate other sessions. + // So stash a reference back to the GarnetServer if we created a RespServerSessions. 
+ if (session is RespServerSession respSession) + { + respSession.Server = this; + } + return true; } diff --git a/libs/server/Servers/GarnetServerOptions.cs b/libs/server/Servers/GarnetServerOptions.cs index 492da92d2b..1d1f938902 100644 --- a/libs/server/Servers/GarnetServerOptions.cs +++ b/libs/server/Servers/GarnetServerOptions.cs @@ -2,6 +2,7 @@ // Licensed under the MIT license. using System; +using System.Collections.Generic; using System.IO; using Garnet.server.Auth.Settings; using Garnet.server.TLS; @@ -21,9 +22,9 @@ public class GarnetServerOptions : ServerOptions public bool DisableObjects = false; /// - /// Total memory size limit of object store including heap memory of objects. + /// Heap memory size limit of object store. /// - public string ObjectStoreTotalMemorySize = ""; + public string ObjectStoreHeapMemorySize = ""; /// /// Object store log memory used in bytes excluding heap memory. @@ -92,6 +93,16 @@ public class GarnetServerOptions : ServerOptions /// public string AofPageSize = "4m"; + /// + /// AOF replication (safe tail address) refresh frequency in milliseconds. 0 = auto refresh after every enqueue. + /// + public int AofReplicationRefreshFrequencyMs = 10; + + /// + /// Subscriber (safe tail address) refresh frequency in milliseconds (for pub-sub). 0 = auto refresh after every enqueue. + /// + public int SubscriberRefreshFrequencyMs = 0; + /// /// Write ahead logging (append-only file) commit issue frequency in milliseconds. 
/// 0 = issue an immediate commit per operation @@ -358,6 +369,8 @@ public class GarnetServerOptions : ServerOptions /// public bool ExtensionAllowUnsignedAssemblies; + public IEnumerable LoadModuleCS; + /// /// Constructor /// @@ -515,7 +528,7 @@ public static int MemorySizeBits(string memorySize, string storePageSize, out in /// /// Get KVSettings for the object store log /// - public KVSettings GetObjectStoreSettings(ILogger logger, out long objTotalMemorySize) + public KVSettings GetObjectStoreSettings(ILogger logger, out long objHeapMemorySize) { if (ObjectStoreMutablePercent is < 10 or > 95) throw new Exception("ObjectStoreMutablePercent must be between 10 and 95"); @@ -563,8 +576,8 @@ public KVSettings GetObjectStoreSettings(ILogger logger, } logger?.LogInformation("[Object Store] Using log mutable percentage of {ObjectStoreMutablePercent}%", ObjectStoreMutablePercent); - objTotalMemorySize = ParseSize(ObjectStoreTotalMemorySize); - logger?.LogInformation("[Object Store] Total memory size including heap objects is {totalMemorySize}", (objTotalMemorySize > 0 ? PrettySize(objTotalMemorySize) : "unlimited")); + objHeapMemorySize = ParseSize(ObjectStoreHeapMemorySize); + logger?.LogInformation("[Object Store] Total memory size including heap objects is {totalMemorySize}", (objHeapMemorySize > 0 ? PrettySize(objHeapMemorySize) : "unlimited")); if (EnableStorageTier) { @@ -616,7 +629,7 @@ public void GetAofSettings(out TsavoriteLogSettings tsavoriteLogSettings) PageSizeBits = AofPageSizeBits(), LogDevice = GetAofDevice(), TryRecoverLatest = false, - AutoRefreshSafeTailAddress = true, + SafeTailRefreshFrequencyMs = EnableCluster ? 
AofReplicationRefreshFrequencyMs : -1, FastCommitMode = EnableFastCommit, AutoCommit = CommitFrequencyMs == 0, MutableFraction = 0.9, @@ -716,9 +729,9 @@ public int ObjectStoreSegmentSizeBits() /// IDevice GetAofDevice() { - if (!MainMemoryReplication && UseAofNullDevice) - throw new Exception("Cannot use null device for AOF when not using main memory replication"); - if (MainMemoryReplication && UseAofNullDevice) return new NullDevice(); + if (UseAofNullDevice && EnableCluster && !MainMemoryReplication) + throw new Exception("Cannot use null device for AOF when cluster is enabled and you are not using main memory replication"); + if (UseAofNullDevice) return new NullDevice(); else return GetInitializedDeviceFactory(CheckpointDir).Get(new FileDescriptor("AOF", "aof.log")); } diff --git a/libs/server/Servers/GarnetServerTcp.cs b/libs/server/Servers/GarnetServerTcp.cs index a24a85ee8c..017248d122 100644 --- a/libs/server/Servers/GarnetServerTcp.cs +++ b/libs/server/Servers/GarnetServerTcp.cs @@ -22,8 +22,18 @@ public class GarnetServerTcp : GarnetServerBase, IServerHook readonly Socket servSocket; readonly IGarnetTlsOptions tlsOptions; readonly int networkSendThrottleMax; + readonly NetworkBufferSettings networkBufferSettings; readonly LimitedFixedBufferPool networkPool; + public IPEndPoint GetEndPoint + { + get + { + var ip = string.IsNullOrEmpty(Address) ? IPAddress.Any : IPAddress.Parse(Address); + return new IPEndPoint(ip, Port); + } + } + /// /// Get active consumers /// @@ -64,9 +74,10 @@ public GarnetServerTcp(string address, int port, int networkBufferSize = default { this.tlsOptions = tlsOptions; this.networkSendThrottleMax = networkSendThrottleMax; - this.networkPool = new LimitedFixedBufferPool(BufferSizeUtils.ServerBufferSize(new MaxSizeSettings()), logger: logger); - var ip = string.IsNullOrEmpty(Address) ? 
IPAddress.Any : IPAddress.Parse(Address); - servSocket = new Socket(ip.AddressFamily, SocketType.Stream, ProtocolType.Tcp); + var serverBufferSize = BufferSizeUtils.ServerBufferSize(new MaxSizeSettings()); + this.networkBufferSettings = new NetworkBufferSettings(serverBufferSize, serverBufferSize); + this.networkPool = networkBufferSettings.CreateBufferPool(logger: logger); + servSocket = new Socket(GetEndPoint.AddressFamily, SocketType.Stream, ProtocolType.Tcp); acceptEventArg = new SocketAsyncEventArgs(); acceptEventArg.Completed += AcceptEventArg_Completed; } @@ -80,7 +91,7 @@ public override void Dispose() servSocket.Dispose(); acceptEventArg.UserToken = null; acceptEventArg.Dispose(); - networkPool.Dispose(); + networkPool?.Dispose(); } /// @@ -88,8 +99,7 @@ public override void Dispose() /// public override void Start() { - var ip = Address == null ? IPAddress.Any : IPAddress.Parse(Address); - var endPoint = new IPEndPoint(ip, Port); + var endPoint = GetEndPoint; servSocket.Bind(endPoint); servSocket.Listen(512); if (!servSocket.AcceptAsync(acceptEventArg)) @@ -132,7 +142,7 @@ private unsafe bool HandleNewConnection(SocketAsyncEventArgs e) { try { - handler = new ServerTcpNetworkHandler(this, e.AcceptSocket, networkPool, tlsOptions != null, networkSendThrottleMax, logger); + handler = new ServerTcpNetworkHandler(this, e.AcceptSocket, networkBufferSettings, networkPool, tlsOptions != null, networkSendThrottleMax: networkSendThrottleMax, logger: logger); if (!activeHandlers.TryAdd(handler, default)) throw new Exception("Unable to add handler to dictionary"); @@ -198,5 +208,9 @@ public void DisposeMessageConsumer(INetworkHandler session) } } } + + public void Purge() => networkPool.Purge(); + + public string GetBufferPoolStats() => networkPool.GetStats(); } } \ No newline at end of file diff --git a/libs/server/Servers/RegisterApi.cs b/libs/server/Servers/RegisterApi.cs index 766c01aa9d..86437d5c2b 100644 --- a/libs/server/Servers/RegisterApi.cs +++ 
b/libs/server/Servers/RegisterApi.cs @@ -27,6 +27,7 @@ public RegisterApi(GarnetProvider provider) /// Type of command (e.g., read) /// Custom functions for command logic /// RESP command info + /// RESP command docs /// /// Expiration for value, in ticks. /// -1 => remove existing expiration metadata; @@ -34,8 +35,8 @@ public RegisterApi(GarnetProvider provider) /// >0 => set expiration to given value. /// /// ID of the registered command - public int NewCommand(string name, CommandType type, CustomRawStringFunctions customFunctions, RespCommandsInfo commandInfo = null, long expirationTicks = 0) - => provider.StoreWrapper.customCommandManager.Register(name, type, customFunctions, commandInfo, expirationTicks); + public int NewCommand(string name, CommandType type, CustomRawStringFunctions customFunctions, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null, long expirationTicks = 0) + => provider.StoreWrapper.customCommandManager.Register(name, type, customFunctions, commandInfo, commandDocs, expirationTicks); /// /// Register transaction procedure with Garnet @@ -43,9 +44,10 @@ public int NewCommand(string name, CommandType type, CustomRawStringFunctions cu /// Name of command /// Custom stored procedure /// RESP command info + /// RESP command docs /// ID of the registered command - public int NewTransactionProc(string name, Func proc, RespCommandsInfo commandInfo = null) - => provider.StoreWrapper.customCommandManager.Register(name, proc, commandInfo); + public int NewTransactionProc(string name, Func proc, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) + => provider.StoreWrapper.customCommandManager.Register(name, proc, commandInfo, commandDocs); /// /// Register object type with server @@ -71,9 +73,10 @@ public void NewType(int type, CustomObjectFactory factory) /// Custom factory for object /// Custom object command implementation /// RESP command info + /// RESP command docs /// ID of the registered command - 
public (int objectTypeId, int subCommandId) NewCommand(string name, CommandType commandType, CustomObjectFactory factory, CustomObjectFunctions customObjectFunctions, RespCommandsInfo commandInfo = null) - => provider.StoreWrapper.customCommandManager.Register(name, commandType, factory, customObjectFunctions, commandInfo); + public (int objectTypeId, int subCommandId) NewCommand(string name, CommandType commandType, CustomObjectFactory factory, CustomObjectFunctions customObjectFunctions, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) + => provider.StoreWrapper.customCommandManager.Register(name, commandType, factory, customObjectFunctions, commandInfo, commandDocs); /// /// Register custom procedure with Garnet @@ -81,8 +84,9 @@ public void NewType(int type, CustomObjectFactory factory) /// /// /// + /// /// - public int NewProcedure(string name, CustomProcedure customProcedure, RespCommandsInfo commandInfo = null) - => provider.StoreWrapper.customCommandManager.Register(name, customProcedure, commandInfo); + public int NewProcedure(string name, CustomProcedure customProcedure, RespCommandsInfo commandInfo = null, RespCommandDocs commandDocs = null) + => provider.StoreWrapper.customCommandManager.Register(name, customProcedure, commandInfo, commandDocs); } } \ No newline at end of file diff --git a/libs/server/Servers/ServerTcpNetworkHandler.cs b/libs/server/Servers/ServerTcpNetworkHandler.cs index 87c4bb7c74..1f092e5622 100644 --- a/libs/server/Servers/ServerTcpNetworkHandler.cs +++ b/libs/server/Servers/ServerTcpNetworkHandler.cs @@ -9,8 +9,8 @@ namespace Garnet.server { internal sealed class ServerTcpNetworkHandler : TcpNetworkHandler { - public ServerTcpNetworkHandler(GarnetServerTcp serverHook, Socket socket, LimitedFixedBufferPool networkPool, bool useTLS, int networkSendThrottleMax, ILogger logger = null) - : base(serverHook, socket, networkPool, useTLS, null, networkSendThrottleMax, logger) + public 
ServerTcpNetworkHandler(GarnetServerTcp serverHook, Socket socket, NetworkBufferSettings networkBufferSettings, LimitedFixedBufferPool networkPool, bool useTLS, int networkSendThrottleMax = 8, ILogger logger = null) + : base(serverHook, socket, networkBufferSettings, networkPool, useTLS, null, networkSendThrottleMax: networkSendThrottleMax, logger: logger) { } } diff --git a/libs/server/Servers/StoreApi.cs b/libs/server/Servers/StoreApi.cs index 10a09071d3..f9d67cc366 100644 --- a/libs/server/Servers/StoreApi.cs +++ b/libs/server/Servers/StoreApi.cs @@ -41,7 +41,7 @@ public StoreApi(StoreWrapper storeWrapper) /// /// Commit AOF /// - public ValueTask CommitAOFAsync(CancellationToken token) => storeWrapper.appendOnlyFile != null ? storeWrapper.appendOnlyFile.CommitAsync(token) : ValueTask.CompletedTask; + public ValueTask CommitAOFAsync(CancellationToken token) => storeWrapper.appendOnlyFile != null ? storeWrapper.appendOnlyFile.CommitAsync(null, token) : ValueTask.CompletedTask; /// /// Flush DB (delete all keys) diff --git a/libs/server/Storage/Functions/MainStore/PrivateMethods.cs b/libs/server/Storage/Functions/MainStore/PrivateMethods.cs index c24563ef50..5eb0835968 100644 --- a/libs/server/Storage/Functions/MainStore/PrivateMethods.cs +++ b/libs/server/Storage/Functions/MainStore/PrivateMethods.cs @@ -217,6 +217,17 @@ void CopyRespToWithInput(ref SpanByte input, ref SpanByte value, ref SpanByteAnd (start, end) = NormalizeRange(start, end, len); CopyRespTo(ref value, ref dst, start, end); return; + + case RespCommand.EXPIRETIME: + var expireTime = ConvertUtils.UnixTimeInSecondsFromTicks(value.MetadataSize > 0 ? value.ExtraMetadata : -1); + CopyRespNumber(expireTime, ref dst); + return; + + case RespCommand.PEXPIRETIME: + var pexpireTime = ConvertUtils.UnixTimeInMillisecondsFromTicks(value.MetadataSize > 0 ? 
value.ExtraMetadata : -1); + CopyRespNumber(pexpireTime, ref dst); + return; + default: throw new GarnetException("Unsupported operation on input"); } @@ -238,6 +249,7 @@ bool EvaluateExpireInPlace(ExpireOption optionType, bool expiryExists, ref SpanB o->result1 = 1; break; case ExpireOption.GT: + case ExpireOption.XXGT: bool replace = input.ExtraMetadata < value.ExtraMetadata; value.ExtraMetadata = replace ? value.ExtraMetadata : input.ExtraMetadata; if (replace) @@ -246,6 +258,7 @@ bool EvaluateExpireInPlace(ExpireOption optionType, bool expiryExists, ref SpanB o->result1 = 1; break; case ExpireOption.LT: + case ExpireOption.XXLT: replace = input.ExtraMetadata > value.ExtraMetadata; value.ExtraMetadata = replace ? value.ExtraMetadata : input.ExtraMetadata; if (replace) @@ -264,10 +277,12 @@ bool EvaluateExpireInPlace(ExpireOption optionType, bool expiryExists, ref SpanB { case ExpireOption.NX: case ExpireOption.None: + case ExpireOption.LT: // If expiry doesn't exist, LT should treat the current expiration as infinite return false; case ExpireOption.XX: case ExpireOption.GT: - case ExpireOption.LT: + case ExpireOption.XXGT: + case ExpireOption.XXLT: o->result1 = 0; return true; default: @@ -293,6 +308,7 @@ void EvaluateExpireCopyUpdate(ExpireOption optionType, bool expiryExists, ref Sp o->result1 = 1; break; case ExpireOption.GT: + case ExpireOption.XXGT: oldValue.AsReadOnlySpan().CopyTo(newValue.AsSpan()); bool replace = input.ExtraMetadata < oldValue.ExtraMetadata; newValue.ExtraMetadata = replace ? oldValue.ExtraMetadata : input.ExtraMetadata; @@ -302,6 +318,7 @@ void EvaluateExpireCopyUpdate(ExpireOption optionType, bool expiryExists, ref Sp o->result1 = 1; break; case ExpireOption.LT: + case ExpireOption.XXLT: oldValue.AsReadOnlySpan().CopyTo(newValue.AsSpan()); replace = input.ExtraMetadata > oldValue.ExtraMetadata; newValue.ExtraMetadata = replace ? 
oldValue.ExtraMetadata : input.ExtraMetadata; @@ -318,13 +335,15 @@ void EvaluateExpireCopyUpdate(ExpireOption optionType, bool expiryExists, ref Sp { case ExpireOption.NX: case ExpireOption.None: + case ExpireOption.LT: // If expiry doesn't exist, LT should treat the current expiration as infinite newValue.ExtraMetadata = input.ExtraMetadata; oldValue.AsReadOnlySpan().CopyTo(newValue.AsSpan()); o->result1 = 1; break; case ExpireOption.XX: case ExpireOption.GT: - case ExpireOption.LT: + case ExpireOption.XXGT: + case ExpireOption.XXLT: oldValue.AsReadOnlySpan().CopyTo(newValue.AsSpan()); o->result1 = 0; break; diff --git a/libs/server/Storage/Functions/ObjectStore/PrivateMethods.cs b/libs/server/Storage/Functions/ObjectStore/PrivateMethods.cs index ef5c82cc80..8a96900757 100644 --- a/libs/server/Storage/Functions/ObjectStore/PrivateMethods.cs +++ b/libs/server/Storage/Functions/ObjectStore/PrivateMethods.cs @@ -150,6 +150,7 @@ static bool EvaluateObjectExpireInPlace(ExpireOption optionType, bool expiryExis o->result1 = 1; break; case ExpireOption.GT: + case ExpireOption.XXGT: bool replace = expiration < value.Expiration; value.Expiration = replace ? value.Expiration : expiration; if (replace) @@ -158,6 +159,7 @@ static bool EvaluateObjectExpireInPlace(ExpireOption optionType, bool expiryExis o->result1 = 1; break; case ExpireOption.LT: + case ExpireOption.XXLT: replace = expiration > value.Expiration; value.Expiration = replace ? 
value.Expiration : expiration; if (replace) @@ -175,12 +177,14 @@ static bool EvaluateObjectExpireInPlace(ExpireOption optionType, bool expiryExis { case ExpireOption.NX: case ExpireOption.None: + case ExpireOption.LT: // If expiry doesn't exist, LT should treat the current expiration as infinite value.Expiration = expiration; o->result1 = 1; break; case ExpireOption.XX: case ExpireOption.GT: - case ExpireOption.LT: + case ExpireOption.XXGT: + case ExpireOption.XXLT: o->result1 = 0; break; default: diff --git a/libs/server/Storage/Functions/ObjectStore/RMWMethods.cs b/libs/server/Storage/Functions/ObjectStore/RMWMethods.cs index 0bb36fe293..866b90c028 100644 --- a/libs/server/Storage/Functions/ObjectStore/RMWMethods.cs +++ b/libs/server/Storage/Functions/ObjectStore/RMWMethods.cs @@ -171,6 +171,7 @@ public bool PostCopyUpdater(ref byte[] key, ref ObjectInput input, ref IGarnetOb { // We're performing the object update here (and not in CopyUpdater) so that we are guaranteed that // the record was CASed into the hash chain before it gets modified + var oldValueSize = oldValue.Size; oldValue.CopyUpdate(ref oldValue, ref value, rmwInfo.RecordInfo.IsInNewVersion); functionsState.watchVersionMap.IncrementVersion(rmwInfo.KeyHash); @@ -220,7 +221,9 @@ public bool PostCopyUpdater(ref byte[] key, ref ObjectInput input, ref IGarnetOb } } - functionsState.objectStoreSizeTracker?.AddTrackedSize(MemoryUtils.CalculateKeyValueSize(key, value)); + // If oldValue has been set to null, subtract it's size from the tracked heap size + var sizeAdjustment = oldValue == null ? 
value.Size - oldValueSize : value.Size; + functionsState.objectStoreSizeTracker?.AddTrackedSize(sizeAdjustment); if (functionsState.appendOnlyFile != null) WriteLogRMW(ref key, ref input, rmwInfo.Version, rmwInfo.SessionID); diff --git a/libs/server/Storage/Functions/ObjectStore/ReadMethods.cs b/libs/server/Storage/Functions/ObjectStore/ReadMethods.cs index 2cc20b0dc0..b360f7849a 100644 --- a/libs/server/Storage/Functions/ObjectStore/ReadMethods.cs +++ b/libs/server/Storage/Functions/ObjectStore/ReadMethods.cs @@ -25,27 +25,40 @@ public bool SingleReader(ref byte[] key, ref ObjectInput input, ref IGarnetObjec if (input.header.type != 0) { - if (input.header.type == GarnetObjectType.Ttl || input.header.type == GarnetObjectType.PTtl) // TTL command + switch (input.header.type) { - var ttlValue = input.header.type == GarnetObjectType.Ttl ? - ConvertUtils.SecondsFromDiffUtcNowTicks(value.Expiration > 0 ? value.Expiration : -1) : - ConvertUtils.MillisecondsFromDiffUtcNowTicks(value.Expiration > 0 ? value.Expiration : -1); - CopyRespNumber(ttlValue, ref dst.spanByteAndMemory); - return true; - } + case GarnetObjectType.Ttl: + var ttlValue = ConvertUtils.SecondsFromDiffUtcNowTicks(value.Expiration > 0 ? value.Expiration : -1); + CopyRespNumber(ttlValue, ref dst.spanByteAndMemory); + return true; + case GarnetObjectType.PTtl: + ttlValue = ConvertUtils.MillisecondsFromDiffUtcNowTicks(value.Expiration > 0 ? value.Expiration : -1); + CopyRespNumber(ttlValue, ref dst.spanByteAndMemory); + return true; + + case GarnetObjectType.Expiretime: + var expireTime = ConvertUtils.UnixTimeInSecondsFromTicks(value.Expiration > 0 ? value.Expiration : -1); + CopyRespNumber(expireTime, ref dst.spanByteAndMemory); + return true; + case GarnetObjectType.PExpiretime: + expireTime = ConvertUtils.UnixTimeInMillisecondsFromTicks(value.Expiration > 0 ? 
value.Expiration : -1); + CopyRespNumber(expireTime, ref dst.spanByteAndMemory); + return true; - if ((byte)input.header.type < CustomCommandManager.StartOffset) - return value.Operate(ref input, ref dst.spanByteAndMemory, out _, out _); + default: + if ((byte)input.header.type < CustomCommandManager.StartOffset) + return value.Operate(ref input, ref dst.spanByteAndMemory, out _, out _); - if (IncorrectObjectType(ref input, value, ref dst.spanByteAndMemory)) - return true; + if (IncorrectObjectType(ref input, value, ref dst.spanByteAndMemory)) + return true; - (IMemoryOwner Memory, int Length) outp = (dst.spanByteAndMemory.Memory, 0); - var customObjectCommand = GetCustomObjectCommand(ref input, input.header.type); - var result = customObjectCommand.Reader(key, ref input, value, ref outp, ref readInfo); - dst.spanByteAndMemory.Memory = outp.Memory; - dst.spanByteAndMemory.Length = outp.Length; - return result; + (IMemoryOwner Memory, int Length) outp = (dst.spanByteAndMemory.Memory, 0); + var customObjectCommand = GetCustomObjectCommand(ref input, input.header.type); + var result = customObjectCommand.Reader(key, ref input, value, ref outp, ref readInfo); + dst.spanByteAndMemory.Memory = outp.Memory; + dst.spanByteAndMemory.Length = outp.Length; + return result; + } } dst.garnetObject = value; diff --git a/libs/server/Storage/Session/Common/ArrayKeyIterationFunctions.cs b/libs/server/Storage/Session/Common/ArrayKeyIterationFunctions.cs index aaa5091e3d..25aedc1ec4 100644 --- a/libs/server/Storage/Session/Common/ArrayKeyIterationFunctions.cs +++ b/libs/server/Storage/Session/Common/ArrayKeyIterationFunctions.cs @@ -10,7 +10,7 @@ namespace Garnet.server { sealed partial class StorageSession : IDisposable { - // These are classes so instantiate once and re-initialize + // These contain classes so instantiate once and re-initialize private ArrayKeyIterationFunctions.MainStoreGetDBSize mainStoreDbSizeFuncs; private ArrayKeyIterationFunctions.ObjectStoreGetDBSize 
objectStoreDbSizeFuncs; @@ -111,7 +111,7 @@ internal unsafe bool DbScan(ArgSlice patternB, bool allKeys, long cursor, out lo } /// - /// Iterate the contents of the main store + /// Iterate the contents of the main store (push-based) /// /// /// @@ -119,7 +119,7 @@ internal unsafe bool DbScan(ArgSlice patternB, bool allKeys, long cursor, out lo /// internal bool IterateMainStore(ref TScanFunctions scanFunctions, long untilAddress = -1) where TScanFunctions : IScanIteratorFunctions - => basicContext.Session.Iterate(ref scanFunctions, untilAddress); + => basicContext.Session.IterateLookup(ref scanFunctions, untilAddress); /// /// Iterate the contents of the main store (pull based) @@ -128,7 +128,7 @@ internal ITsavoriteScanIterator IterateMainStore() => basicContext.Session.Iterate(); /// - /// Iterate the contents of the object store + /// Iterate the contents of the object store (push-based) /// /// /// @@ -136,7 +136,7 @@ internal ITsavoriteScanIterator IterateMainStore() /// internal bool IterateObjectStore(ref TScanFunctions scanFunctions, long untilAddress = -1) where TScanFunctions : IScanIteratorFunctions - => objectStoreBasicContext.Session.Iterate(ref scanFunctions, untilAddress); + => objectStoreBasicContext.Session.IterateLookup(ref scanFunctions, untilAddress); /// /// Iterate the contents of the main store (pull based) @@ -180,14 +180,14 @@ internal int DbSize() mainStoreDbSizeFuncs.Initialize(); long cursor = 0; basicContext.Session.ScanCursor(ref cursor, long.MaxValue, mainStoreDbSizeFuncs); - int count = mainStoreDbSizeFuncs.count; + int count = mainStoreDbSizeFuncs.Count; if (objectStoreBasicContext.Session != null) { objectStoreDbSizeFuncs ??= new(); objectStoreDbSizeFuncs.Initialize(); cursor = 0; objectStoreBasicContext.Session.ScanCursor(ref cursor, long.MaxValue, objectStoreDbSizeFuncs); - count += objectStoreDbSizeFuncs.count; + count += objectStoreDbSizeFuncs.Count; } return count; @@ -195,25 +195,38 @@ internal int DbSize() internal static 
unsafe class ArrayKeyIterationFunctions { - internal sealed class MainStoreGetDBKeys : IScanIteratorFunctions + internal class GetDBKeysInfo { - List keys; - byte* patternB; - int patternLength; + // This must be a class as it is passed through pending IO operations, so it is wrapped by higher structures for inlining as a generic type arg. + internal List keys; + internal byte* patternB; + internal int patternLength; + internal Type matchType; - internal void Initialize(List keys, byte* patternB, int length) + internal void Initialize(List keys, byte* patternB, int length, Type matchType = null) { this.keys = keys; this.patternB = patternB; this.patternLength = length; + this.matchType = matchType; } + } + + internal sealed class MainStoreGetDBKeys : IScanIteratorFunctions + { + private readonly GetDBKeysInfo info; + + internal MainStoreGetDBKeys() => info = new(); + + internal void Initialize(List keys, byte* patternB, int length) + => info.Initialize(keys, patternB, length); public bool SingleReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) => ConcurrentReader(ref key, ref value, recordMetadata, numberOfRecords, out cursorRecordResult); public bool ConcurrentReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) { - if ((patternB != null && !GlobUtils.Match(patternB, patternLength, key.ToPointer(), key.Length, true)) + if ((info.patternB != null && !GlobUtils.Match(info.patternB, info.patternLength, key.ToPointer(), key.Length, true)) || (value.MetadataSize != 0 && MainSessionFunctions.CheckExpiry(ref value))) { cursorRecordResult = CursorRecordResult.Skip; @@ -221,7 +234,7 @@ public bool ConcurrentReader(ref SpanByte key, ref SpanByte value, RecordMetadat else { cursorRecordResult = CursorRecordResult.Accept; - keys.Add(key.ToByteArray()); + info.keys.Add(key.ToByteArray()); } return 
true; } @@ -233,18 +246,12 @@ public void OnException(Exception exception, long numberOfRecords) { } internal sealed class ObjectStoreGetDBKeys : IScanIteratorFunctions { - List keys; - byte* patternB; - int patternLength; - private Type matchType; + private readonly GetDBKeysInfo info; + + internal ObjectStoreGetDBKeys() => info = new(); internal void Initialize(List keys, byte* patternB, int length, Type matchType = null) - { - this.keys = keys; - this.patternB = patternB; - this.patternLength = length; - this.matchType = matchType; - } + => info.Initialize(keys, patternB, length, matchType); public bool SingleReader(ref byte[] key, ref IGarnetObject value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) => ConcurrentReader(ref key, ref value, recordMetadata, numberOfRecords, out cursorRecordResult); @@ -257,11 +264,11 @@ public bool ConcurrentReader(ref byte[] key, ref IGarnetObject value, RecordMeta return true; } - if (patternB != null) + if (info.patternB != null) { fixed (byte* keyPtr = key) { - if (!GlobUtils.Match(patternB, patternLength, keyPtr, key.Length, true)) + if (!GlobUtils.Match(info.patternB, info.patternLength, keyPtr, key.Length, true)) { cursorRecordResult = CursorRecordResult.Skip; return true; @@ -269,13 +276,13 @@ public bool ConcurrentReader(ref byte[] key, ref IGarnetObject value, RecordMeta } } - if (matchType != null && value.GetType() != matchType) + if (info.matchType != null && value.GetType() != info.matchType) { cursorRecordResult = CursorRecordResult.Skip; return true; } - keys.Add(key); + info.keys.Add(key); cursorRecordResult = CursorRecordResult.Accept; return true; } @@ -285,12 +292,23 @@ public void OnStop(bool completed, long numberOfRecords) { } public void OnException(Exception exception, long numberOfRecords) { } } - internal sealed class MainStoreGetDBSize : IScanIteratorFunctions + internal class GetDBSizeInfo { - // This must be a class as it is passed through pending IO 
operations + // This must be a class as it is passed through pending IO operations, so it is wrapped by higher structures for inlining as a generic type arg. internal int count; internal void Initialize() => count = 0; + } + + internal sealed class MainStoreGetDBSize : IScanIteratorFunctions + { + private readonly GetDBSizeInfo info; + + internal int Count => info.count; + + internal MainStoreGetDBSize() => info = new(); + + internal void Initialize() => info.Initialize(); public bool SingleReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) { @@ -299,7 +317,7 @@ public bool SingleReader(ref SpanByte key, ref SpanByte value, RecordMetadata re else { cursorRecordResult = CursorRecordResult.Accept; - ++count; + ++info.count; } return true; } @@ -312,10 +330,13 @@ public void OnException(Exception exception, long numberOfRecords) { } internal sealed class ObjectStoreGetDBSize : IScanIteratorFunctions { - // This must be a class as it is passed through pending IO operations - internal int count; + private readonly GetDBSizeInfo info; - internal void Initialize() => count = 0; + internal int Count => info.count; + + internal ObjectStoreGetDBSize() => info = new(); + + internal void Initialize() => info.Initialize(); public bool SingleReader(ref byte[] key, ref IGarnetObject value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) { @@ -324,7 +345,7 @@ public bool SingleReader(ref byte[] key, ref IGarnetObject value, RecordMetadata else { cursorRecordResult = CursorRecordResult.Accept; - ++count; + ++info.count; } return true; } diff --git a/libs/server/Storage/Session/MainStore/MainStoreOps.cs b/libs/server/Storage/Session/MainStore/MainStoreOps.cs index f979bcc0f6..344c4a72b4 100644 --- a/libs/server/Storage/Session/MainStore/MainStoreOps.cs +++ b/libs/server/Storage/Session/MainStore/MainStoreOps.cs @@ -302,6 +302,71 @@ public unsafe 
GarnetStatus TTL(ref SpanByte key, Store return GarnetStatus.NOTFOUND; } + /// + /// Get the absolute Unix timestamp at which the given key will expire. + /// + /// + /// + /// The key to get the Unix timestamp. + /// The store to operate on + /// Span to allocate the output of the operation + /// Basic Context of the store + /// Object Context of the store + /// when true the command to execute is PEXPIRETIME. + /// Returns the absolute Unix timestamp (since January 1, 1970) in seconds or milliseconds at which the given key will expire. + public unsafe GarnetStatus EXPIRETIME(ref SpanByte key, StoreType storeType, ref SpanByteAndMemory output, ref TContext context, ref TObjectContext objectContext, bool milliseconds = false) + where TContext : ITsavoriteContext + where TObjectContext : ITsavoriteContext + { + int inputSize = sizeof(int) + RespInputHeader.Size; + byte* pbCmdInput = stackalloc byte[inputSize]; + + byte* pcurr = pbCmdInput; + *(int*)pcurr = inputSize - sizeof(int); + pcurr += sizeof(int); + (*(RespInputHeader*)pcurr).cmd = milliseconds ? RespCommand.PEXPIRETIME : RespCommand.EXPIRETIME; + (*(RespInputHeader*)pcurr).flags = 0; + + if (storeType == StoreType.Main || storeType == StoreType.All) + { + var status = context.Read(ref key, ref Unsafe.AsRef(pbCmdInput), ref output); + + if (status.IsPending) + { + StartPendingMetrics(); + CompletePendingForSession(ref status, ref output, ref context); + StopPendingMetrics(); + } + + if (status.Found) return GarnetStatus.OK; + } + + if ((storeType == StoreType.Object || storeType == StoreType.All) && !objectStoreBasicContext.IsNull) + { + var objInput = new ObjectInput + { + header = new RespInputHeader + { + type = milliseconds ? 
GarnetObjectType.PExpiretime : GarnetObjectType.Expiretime, + }, + }; + + var keyBA = key.ToByteArray(); + var objO = new GarnetObjectStoreOutput { spanByteAndMemory = output }; + var status = objectContext.Read(ref keyBA, ref objInput, ref objO); + + if (status.IsPending) + CompletePendingForObjectStoreSession(ref status, ref objO, ref objectContext); + + if (status.Found) + { + output = objO.spanByteAndMemory; + return GarnetStatus.OK; + } + } + return GarnetStatus.NOTFOUND; + } + public GarnetStatus SET(ref SpanByte key, ref SpanByte value, ref TContext context) where TContext : ITsavoriteContext { @@ -521,12 +586,33 @@ public GarnetStatus DELETE(byte[] key, StoreType store } public unsafe GarnetStatus RENAME(ArgSlice oldKeySlice, ArgSlice newKeySlice, StoreType storeType) + { + return RENAME(oldKeySlice, newKeySlice, storeType, false, out _); + } + + /// + /// Renames key to newkey if newkey does not yet exist. It returns an error when key does not exist. + /// + /// The old key to be renamed. + /// The new key name. + /// The type of store to perform the operation on. + /// + public unsafe GarnetStatus RENAMENX(ArgSlice oldKeySlice, ArgSlice newKeySlice, StoreType storeType, out int result) + { + return RENAME(oldKeySlice, newKeySlice, storeType, true, out result); + } + + private unsafe GarnetStatus RENAME(ArgSlice oldKeySlice, ArgSlice newKeySlice, StoreType storeType, bool isNX, out int result) { GarnetStatus returnStatus = GarnetStatus.NOTFOUND; + result = -1; // If same name check return early. 
if (oldKeySlice.ReadOnlySpan.SequenceEqual(newKeySlice.ReadOnlySpan)) + { + result = 1; return GarnetStatus.OK; + } bool createTransaction = false; if (txnManager.state != TxnState.Running) @@ -539,14 +625,12 @@ public unsafe GarnetStatus RENAME(ArgSlice oldKeySlice, ArgSlice newKeySlice, St var context = txnManager.LockableContext; var objectContext = txnManager.ObjectStoreLockableContext; + SpanByte oldKey = oldKeySlice.SpanByte; if (storeType == StoreType.Main || storeType == StoreType.All) { try { - SpanByte oldKey = oldKeySlice.SpanByte; - SpanByte newKey = newKeySlice.SpanByte; - SpanByte input = default; var o = new SpanByteAndMemory(); var status = GET(ref oldKey, ref input, ref o, ref context); @@ -558,16 +642,83 @@ public unsafe GarnetStatus RENAME(ArgSlice oldKeySlice, ArgSlice newKeySlice, St var ptrVal = (byte*)memoryHandle.Pointer; RespReadUtils.ReadUnsignedLengthHeader(out var headerLength, ref ptrVal, ptrVal + o.Length); - var value = SpanByte.FromPinnedPointer(ptrVal, headerLength); - SET(ref newKey, ref value, ref context); - - memoryHandle.Dispose(); - o.Memory.Dispose(); - - // Delete the old key - DELETE(ref oldKey, StoreType.Main, ref context, ref objectContext); - returnStatus = GarnetStatus.OK; + // Find expiration time of the old key + var expireSpan = new SpanByteAndMemory(); + var ttlStatus = TTL(ref oldKey, storeType, ref expireSpan, ref context, ref objectContext, true); + + if (ttlStatus == GarnetStatus.OK && !expireSpan.IsSpanByte) + { + using var expireMemoryHandle = expireSpan.Memory.Memory.Pin(); + var expirePtrVal = (byte*)expireMemoryHandle.Pointer; + RespReadUtils.TryRead64Int(out var expireTimeMs, ref expirePtrVal, expirePtrVal + expireSpan.Length, out var _); + + // If the key has an expiration, set the new key with the expiration + if (expireTimeMs > 0) + { + if (isNX) + { + // Move payload forward to make space for RespInputHeader and Metadata + var setValue = scratchBufferManager.FormatScratch(RespInputHeader.Size + 
sizeof(long), new ArgSlice(ptrVal, headerLength)); + var setValueSpan = setValue.SpanByte; + var setValuePtr = setValueSpan.ToPointerWithMetadata(); + setValueSpan.ExtraMetadata = DateTimeOffset.UtcNow.Ticks + TimeSpan.FromMilliseconds(expireTimeMs).Ticks; + ((RespInputHeader*)(setValuePtr + sizeof(long)))->cmd = RespCommand.SETEXNX; + ((RespInputHeader*)(setValuePtr + sizeof(long)))->flags = 0; + var newKey = newKeySlice.SpanByte; + var setStatus = SET_Conditional(ref newKey, ref setValueSpan, ref context); + + // For SET NX `NOTFOUND` means the operation succeeded + result = setStatus == GarnetStatus.NOTFOUND ? 1 : 0; + returnStatus = GarnetStatus.OK; + } + else + { + SETEX(newKeySlice, new ArgSlice(ptrVal, headerLength), TimeSpan.FromMilliseconds(expireTimeMs), ref context); + } + } + else if (expireTimeMs == -1) // Its possible to have expireTimeMs as 0 (Key expired or will be expired now) or -2 (Key does not exist), in those cases we don't SET the new key + { + if (isNX) + { + // Move payload forward to make space for RespInputHeader + var setValue = scratchBufferManager.FormatScratch(RespInputHeader.Size, new ArgSlice(ptrVal, headerLength)); + var setValueSpan = setValue.SpanByte; + var setValuePtr = setValueSpan.ToPointerWithMetadata(); + ((RespInputHeader*)setValuePtr)->cmd = RespCommand.SETEXNX; + ((RespInputHeader*)setValuePtr)->flags = 0; + var newKey = newKeySlice.SpanByte; + var setStatus = SET_Conditional(ref newKey, ref setValueSpan, ref context); + + // For SET NX `NOTFOUND` means the operation succeeded + result = setStatus == GarnetStatus.NOTFOUND ? 
1 : 0; + returnStatus = GarnetStatus.OK; + } + else + { + SpanByte newKey = newKeySlice.SpanByte; + var value = SpanByte.FromPinnedPointer(ptrVal, headerLength); + SET(ref newKey, ref value, ref context); + } + } + + expireSpan.Memory.Dispose(); + memoryHandle.Dispose(); + o.Memory.Dispose(); + + // Delete the old key only when SET NX succeeded + if (isNX && result == 1) + { + DELETE(ref oldKey, StoreType.Main, ref context, ref objectContext); + } + else if (!isNX) + { + // Delete the old key + DELETE(ref oldKey, StoreType.Main, ref context, ref objectContext); + + returnStatus = GarnetStatus.OK; + } + } } } finally @@ -591,19 +742,36 @@ public unsafe GarnetStatus RENAME(ArgSlice oldKeySlice, ArgSlice newKeySlice, St try { byte[] oldKeyArray = oldKeySlice.ToArray(); - byte[] newKeyArray = newKeySlice.ToArray(); - var status = GET(oldKeyArray, out var value, ref objectContext); if (status == GarnetStatus.OK) { var valObj = value.garnetObject; - SET(newKeyArray, valObj, ref objectContext); - - // Delete the old key - DELETE(oldKeyArray, StoreType.Object, ref context, ref objectContext); + byte[] newKeyArray = newKeySlice.ToArray(); returnStatus = GarnetStatus.OK; + var canSetAndDelete = true; + if (isNX) + { + // Not using EXISTS method to avoid new allocation of Array for key + var getNewStatus = GET(newKeyArray, out _, ref objectContext); + canSetAndDelete = getNewStatus == GarnetStatus.NOTFOUND; + } + + if (canSetAndDelete) + { + // valObj already has expiration time, so no need to write expiration logic here + SET(newKeyArray, valObj, ref objectContext); + + // Delete the old key + DELETE(oldKeyArray, StoreType.Object, ref context, ref objectContext); + + result = 1; + } + else + { + result = 0; + } } } finally @@ -612,7 +780,6 @@ public unsafe GarnetStatus RENAME(ArgSlice oldKeySlice, ArgSlice newKeySlice, St txnManager.Commit(true); } } - return returnStatus; } @@ -686,20 +853,63 @@ public unsafe GarnetStatus EXPIRE(ArgSlice key, ArgSli /// Basic context for 
the main store /// Object context for the object store /// When true the command executed is PEXPIRE, expire by default. - /// + /// Return GarnetStatus.OK when key found, else GarnetStatus.NOTFOUND public unsafe GarnetStatus EXPIRE(ArgSlice key, TimeSpan expiry, out bool timeoutSet, StoreType storeType, ExpireOption expireOption, ref TContext context, ref TObjectContext objectStoreContext, bool milliseconds = false) where TContext : ITsavoriteContext where TObjectContext : ITsavoriteContext + { + return EXPIRE(key, DateTimeOffset.UtcNow.Ticks + expiry.Ticks, out timeoutSet, storeType, expireOption, ref context, ref objectStoreContext, milliseconds ? RespCommand.PEXPIRE : RespCommand.EXPIRE); + } + + /// + /// Set a timeout on key using absolute Unix timestamp (seconds since January 1, 1970). + /// + /// + /// + /// The key to set the timeout on. + /// Absolute Unix timestamp + /// True when the timeout was properly set. + /// The store to operate on. + /// Flags to use for the operation. + /// Basic context for the main store + /// Object context for the object store + /// When true, is treated as milliseconds else seconds + /// Return GarnetStatus.OK when key found, else GarnetStatus.NOTFOUND + public unsafe GarnetStatus EXPIREAT(ArgSlice key, long expiryTimestamp, out bool timeoutSet, StoreType storeType, ExpireOption expireOption, ref TContext context, ref TObjectContext objectStoreContext, bool milliseconds = false) + where TContext : ITsavoriteContext + where TObjectContext : ITsavoriteContext + { + var expiryTimestampTicks = milliseconds ? ConvertUtils.UnixTimestampInMillisecondsToTicks(expiryTimestamp) : ConvertUtils.UnixTimestampInSecondsToTicks(expiryTimestamp); + return EXPIRE(key, expiryTimestampTicks, out timeoutSet, storeType, expireOption, ref context, ref objectStoreContext, milliseconds ? RespCommand.PEXPIRE : RespCommand.EXPIRE); + } + + /// + /// Set a timeout on key using ticks. + /// + /// + /// + /// The key to set the timeout on. 
+ /// The timestamp in ticks + /// True when the timeout was properly set. + /// The store to operate on. + /// Flags to use for the operation. + /// Basic context for the main store + /// Object context for the object store + /// Resp Command to be executed. + /// Return GarnetStatus.OK when key found, else GarnetStatus.NOTFOUND + private unsafe GarnetStatus EXPIRE(ArgSlice key, long expiryInTicks, out bool timeoutSet, StoreType storeType, ExpireOption expireOption, ref TContext context, ref TObjectContext objectStoreContext, RespCommand respCommand) + where TContext : ITsavoriteContext + where TObjectContext : ITsavoriteContext { byte* pbCmdInput = stackalloc byte[sizeof(int) + sizeof(long) + RespInputHeader.Size + sizeof(byte)]; *(int*)pbCmdInput = sizeof(long) + RespInputHeader.Size; - ((RespInputHeader*)(pbCmdInput + sizeof(int) + sizeof(long)))->cmd = milliseconds ? RespCommand.PEXPIRE : RespCommand.EXPIRE; + ((RespInputHeader*)(pbCmdInput + sizeof(int) + sizeof(long)))->cmd = respCommand; ((RespInputHeader*)(pbCmdInput + sizeof(int) + sizeof(long)))->flags = 0; *(pbCmdInput + sizeof(int) + sizeof(long) + RespInputHeader.Size) = (byte)expireOption; ref var input = ref SpanByte.Reinterpret(pbCmdInput); - input.ExtraMetadata = DateTimeOffset.UtcNow.Ticks + expiry.Ticks; + input.ExtraMetadata = expiryInTicks; var rmwOutput = stackalloc byte[ObjectOutputHeader.Size]; var output = new SpanByteAndMemory(SpanByte.FromPinnedPointer(rmwOutput, ObjectOutputHeader.Size)); @@ -741,7 +951,7 @@ public unsafe GarnetStatus EXPIRE(ArgSlice key, TimeSp { header = new RespInputHeader { - cmd = milliseconds ? 
RespCommand.PEXPIRE : RespCommand.EXPIRE, + cmd = respCommand, type = GarnetObjectType.Expire, }, parseState = parseState, diff --git a/libs/server/Storage/Session/ObjectStore/ListOps.cs b/libs/server/Storage/Session/ObjectStore/ListOps.cs index 4d85bdf4a1..c1244ae60e 100644 --- a/libs/server/Storage/Session/ObjectStore/ListOps.cs +++ b/libs/server/Storage/Session/ObjectStore/ListOps.cs @@ -54,7 +54,7 @@ public unsafe GarnetStatus ListPush(ArgSlice key, ArgSlice[] ele var status = RMWObjectStoreOperation(arrKey, ref input, out var output, ref objectStoreContext); itemsDoneCount = output.result1; - itemBroker?.HandleCollectionUpdate(arrKey); + itemBroker.HandleCollectionUpdate(arrKey); return status; } @@ -96,7 +96,7 @@ public unsafe GarnetStatus ListPush(ArgSlice key, ArgSlice eleme var status = RMWObjectStoreOperation(key.ToArray(), ref input, out var output, ref objectStoreContext); itemsDoneCount = output.result1; - itemBroker?.HandleCollectionUpdate(key.Span.ToArray()); + itemBroker.HandleCollectionUpdate(key.Span.ToArray()); return status; } @@ -241,6 +241,9 @@ public GarnetStatus ListMove(ArgSlice sourceKey, ArgSlice destinationKey, Operat element = default; var objectLockableContext = txnManager.ObjectStoreLockableContext; + if (itemBroker == null) + ThrowObjectStoreUninitializedException(); + // If source and destination are the same, the operation is equivalent to removing the last element from the list // and pushing it as first element of the list, so it can be considered as a list rotation command. 
bool sameKey = sourceKey.ReadOnlySpan.SequenceEqual(destinationKey.ReadOnlySpan); @@ -344,7 +347,7 @@ public GarnetStatus ListMove(ArgSlice sourceKey, ArgSlice destinationKey, Operat txnManager.Commit(true); } - itemBroker?.HandleCollectionUpdate(destinationKey.Span.ToArray()); + itemBroker.HandleCollectionUpdate(destinationKey.Span.ToArray()); return GarnetStatus.OK; } @@ -390,10 +393,26 @@ public GarnetStatus ListPush(byte[] key, ref ObjectInput input, where TObjectContext : ITsavoriteContext { var status = RMWObjectStoreOperation(key, ref input, out output, ref objectStoreContext); - itemBroker?.HandleCollectionUpdate(key); + itemBroker.HandleCollectionUpdate(key); return status; } + /// + /// The command returns the index of matching elements inside a Redis list. + /// By default, when no options are given, it will scan the list from head to tail, looking for the first match of "element". + /// + /// + /// + /// + /// + /// + /// + public GarnetStatus ListPosition(byte[] key, ref ObjectInput input, ref GarnetObjectStoreOutput outputFooter, ref TObjectContext objectStoreContext) + where TObjectContext : ITsavoriteContext + { + return ReadObjectStoreOperationWithOutput(key, ref input, ref objectStoreContext, ref outputFooter); + } + /// /// Trim an existing list so it only contains the specified range of elements. 
/// @@ -432,7 +451,7 @@ public GarnetStatus ListInsert(byte[] key, ref ObjectInput input where TObjectContext : ITsavoriteContext { var status = RMWObjectStoreOperation(key, ref input, out output, ref objectStoreContext); - itemBroker?.HandleCollectionUpdate(key); + itemBroker.HandleCollectionUpdate(key); return status; } diff --git a/libs/server/Storage/Session/StorageSession.cs b/libs/server/Storage/Session/StorageSession.cs index fd8cffa6c0..b6c440eba1 100644 --- a/libs/server/Storage/Session/StorageSession.cs +++ b/libs/server/Storage/Session/StorageSession.cs @@ -54,14 +54,13 @@ public StorageSession(StoreWrapper storeWrapper, ScratchBufferManager scratchBufferManager, GarnetSessionMetrics sessionMetrics, GarnetLatencyMetricsSession LatencyMetrics, - CollectionItemBroker itemBroker, ILogger logger = null) { this.sessionMetrics = sessionMetrics; this.LatencyMetrics = LatencyMetrics; this.scratchBufferManager = scratchBufferManager; this.logger = logger; - this.itemBroker = itemBroker; + this.itemBroker = storeWrapper.itemBroker; functionsState = storeWrapper.CreateFunctionsState(); diff --git a/libs/server/Storage/SizeTracker/CacheSizeTracker.cs b/libs/server/Storage/SizeTracker/CacheSizeTracker.cs index b319ef59eb..21b6bca3f2 100644 --- a/libs/server/Storage/SizeTracker/CacheSizeTracker.cs +++ b/libs/server/Storage/SizeTracker/CacheSizeTracker.cs @@ -22,13 +22,11 @@ public class CacheSizeTracker { internal readonly LogSizeTracker mainLogTracker; internal readonly LogSizeTracker readCacheTracker; - internal long targetSize; + public long TargetSize; private const int deltaFraction = 10; // 10% of target size private TsavoriteKV store; - internal long IndexSizeBytes => store.IndexSize * 64 + store.OverflowBucketCount * 64; - internal bool Stopped => mainLogTracker.Stopped && (readCacheTracker == null || readCacheTracker.Stopped); /// Helps calculate size of a record including heap memory in Object store. 
@@ -44,7 +42,7 @@ public readonly long CalculateRecordSize(RecordInfo recordInfo, byte[] key, IGar { long size = Utility.RoundUp(key.Length, IntPtr.Size) + MemoryUtils.ByteArrayOverhead; - if (!recordInfo.Tombstone) // ignore deleted values being evicted (they are accounted for by ConcurrentDeleter) + if (!recordInfo.Tombstone && value != null) // ignore deleted values being evicted (they are accounted for by ConcurrentDeleter) size += value.Size; return size; @@ -64,6 +62,7 @@ public CacheSizeTracker(TsavoriteKV 0); this.store = store; + this.TargetSize = targetSize; var logSizeCalculator = new LogSizeCalculator(); var (mainLogTargetSizeBytes, readCacheTargetSizeBytes) = CalculateLogTargetSizeBytes(targetSize); @@ -96,13 +95,8 @@ public void Start(CancellationToken token) /// Target size public (long mainLogSizeBytes, long readCacheSizeBytes) CalculateLogTargetSizeBytes(long newTargetSize) { - long residual = newTargetSize - IndexSizeBytes; - - if (residual <= 0) - throw new TsavoriteException($"Target size {newTargetSize} must be larger than index size {IndexSizeBytes}"); - - var mainLogSizeBytes = this.store.ReadCache == null ? residual : residual / 2; - var readCacheSizeBytes = this.store.ReadCache == null ? 0 : residual / 2; + var mainLogSizeBytes = this.store.ReadCache == null ? newTargetSize : newTargetSize / 2; + var readCacheSizeBytes = this.store.ReadCache == null ? 
0 : newTargetSize / 2; return (mainLogSizeBytes, readCacheSizeBytes); } diff --git a/libs/server/StoreWrapper.cs b/libs/server/StoreWrapper.cs index 925de2c3cd..ce7674a5fa 100644 --- a/libs/server/StoreWrapper.cs +++ b/libs/server/StoreWrapper.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Concurrent; +using System.Collections.Generic; using System.Diagnostics; using System.Net; using System.Net.Sockets; @@ -51,7 +52,7 @@ public sealed class StoreWrapper /// /// Get server /// - public GarnetServerTcp GetServer() => (GarnetServerTcp)server; + public GarnetServerTcp GetTcpServer() => (GarnetServerTcp)server; /// /// Access control list governing all commands @@ -75,8 +76,7 @@ public sealed class StoreWrapper /// public readonly ILoggerFactory loggerFactory; - internal readonly string localEndpoint; - + internal readonly CollectionItemBroker itemBroker; internal readonly CustomCommandManager customCommandManager; internal readonly GarnetServerMonitor monitor; internal readonly WatchVersionMap versionMap; @@ -143,6 +143,9 @@ public StoreWrapper( this.GarnetObjectSerializer = new GarnetObjectSerializer(this.customCommandManager); this.loggingFrequncy = TimeSpan.FromSeconds(serverOptions.LoggingFrequency); + if (!serverOptions.DisableObjects) + this.itemBroker = new CollectionItemBroker(); + // Initialize store scripting cache if (serverOptions.EnableLua) this.storeScriptCache = new ConcurrentDictionary(new ByteArrayComparer()); @@ -176,13 +179,6 @@ public StoreWrapper( if (clusterFactory != null) clusterProvider = clusterFactory.CreateClusterProvider(this); - - string address = serverOptions.Address ?? 
GetIp(); - int port = serverOptions.Port; - localEndpoint = address + ":" + port; - - logger?.LogInformation("Local endpoint: {localEndpoint}", localEndpoint); - ctsCommit = new(); run_id = Generator.CreateHexId(); } @@ -191,16 +187,28 @@ public StoreWrapper( /// Get IP /// /// - public static string GetIp() + public string GetIp() { - string localIP; - using (Socket socket = new(AddressFamily.InterNetwork, SocketType.Dgram, 0)) + var localEndpoint = GetTcpServer().GetEndPoint; + if (localEndpoint.Address.Equals(IPAddress.Any)) + { + using (Socket socket = new(AddressFamily.InterNetwork, SocketType.Dgram, 0)) + { + socket.Connect("8.8.8.8", 65530); + var endPoint = socket.LocalEndPoint as IPEndPoint; + return endPoint.Address.ToString(); + } + } + else if (localEndpoint.Address.Equals(IPAddress.IPv6Any)) { - socket.Connect("8.8.8.8", 65530); - IPEndPoint endPoint = socket.LocalEndPoint as IPEndPoint; - localIP = endPoint.Address.ToString(); + using (Socket socket = new(AddressFamily.InterNetworkV6, SocketType.Dgram, 0)) + { + socket.Connect("2001:4860:4860::8888", 65530); + var endPoint = socket.LocalEndPoint as IPEndPoint; + return endPoint.Address.ToString(); + } } - return localIP; + return localEndpoint.Address.ToString(); } internal FunctionsState CreateFunctionsState() @@ -340,7 +348,7 @@ async Task CommitTask(int commitFrequencyMs, ILogger logger = null, Cancellation } else { - await appendOnlyFile.CommitAsync(token); + await appendOnlyFile.CommitAsync(null, token); await Task.Delay(commitFrequencyMs, token); } } @@ -601,6 +609,7 @@ public void Dispose() //Wait for checkpoints to complete and disable checkpointing _checkpointTaskLock.WriteLock(); + itemBroker?.Dispose(); monitor?.Dispose(); ctsCommit?.Cancel(); @@ -767,5 +776,46 @@ private async Task InitiateCheckpoint(bool full, CheckpointType checkpointType, logger?.LogInformation("Completed checkpoint"); } + + public bool HasKeysInSlots(List slots) + { + if (slots.Count > 0) + { + bool hasKeyInSlots = 
false; + { + using var iter = store.Iterate>(new SimpleSessionFunctions()); + while (!hasKeyInSlots && iter.GetNext(out RecordInfo record)) + { + ref var key = ref iter.GetKey(); + ushort hashSlotForKey = HashSlotUtils.HashSlot(ref key); + if (slots.Contains(hashSlotForKey)) + { + hasKeyInSlots = true; + } + } + } + + if (!hasKeyInSlots && objectStore != null) + { + var functionsState = CreateFunctionsState(); + var objstorefunctions = new ObjectSessionFunctions(functionsState); + var objectStoreSession = objectStore?.NewSession(objstorefunctions); + var iter = objectStoreSession.Iterate(); + while (!hasKeyInSlots && iter.GetNext(out RecordInfo record)) + { + ref var key = ref iter.GetKey(); + ushort hashSlotForKey = HashSlotUtils.HashSlot(key.AsSpan()); + if (slots.Contains(hashSlotForKey)) + { + hasKeyInSlots = true; + } + } + } + + return hasKeyInSlots; + } + + return false; + } } } \ No newline at end of file diff --git a/libs/server/Transaction/TransactionManager.cs b/libs/server/Transaction/TransactionManager.cs index bce85f8592..9123eaf1f7 100644 --- a/libs/server/Transaction/TransactionManager.cs +++ b/libs/server/Transaction/TransactionManager.cs @@ -57,9 +57,6 @@ public sealed unsafe partial class TransactionManager // Not readonly to avoid defensive copy GarnetWatchApi garnetTxPrepareApi; - // Cluster session - IClusterSession clusterSession; - // Not readonly to avoid defensive copy LockableGarnetApi garnetTxMainApi; @@ -118,7 +115,6 @@ internal TransactionManager( this.logger = logger; this.respSession = respSession; - this.clusterSession = respSession.clusterSession; watchContainer = new WatchedKeysContainer(initialSliceBufferSize, functionsState.watchVersionMap); keyEntries = new TxnKeyEntries(initialSliceBufferSize, lockableContext, objectStoreLockableContext); @@ -247,6 +243,10 @@ internal void Commit(bool internal_txn = false) internal void Watch(ArgSlice key, StoreType type) { + // Update watch type if object store is disabled + if (type == 
StoreType.All && objectStoreBasicContext.IsNull) + type = StoreType.Main; + UpdateTransactionStoreType(type); watchContainer.AddWatch(key, type); diff --git a/libs/server/Transaction/TxnKeyManager.cs b/libs/server/Transaction/TxnKeyManager.cs index be90499b1d..0dec851736 100644 --- a/libs/server/Transaction/TxnKeyManager.cs +++ b/libs/server/Transaction/TxnKeyManager.cs @@ -31,7 +31,7 @@ public unsafe void VerifyKeyOwnership(ArgSlice key, LockType type) if (!clusterEnabled) return; bool readOnly = type == LockType.Shared; - if (!clusterSession.CheckSingleKeySlotVerify(key, readOnly, respSession.SessionAsking)) + if (!respSession.clusterSession.CheckSingleKeySlotVerify(key, readOnly, respSession.SessionAsking)) { this.state = TxnState.Aborted; return; diff --git a/libs/server/Transaction/TxnRespCommands.cs b/libs/server/Transaction/TxnRespCommands.cs index d1084f1e79..b62703aee1 100644 --- a/libs/server/Transaction/TxnRespCommands.cs +++ b/libs/server/Transaction/TxnRespCommands.cs @@ -79,7 +79,7 @@ private bool NetworkEXEC() else { endReadHead = _origReadHead; - while (!RespWriteUtils.WriteNull(ref dcurr, dend)) + while (!RespWriteUtils.WriteNullArray(ref dcurr, dend)) SendAndReset(); } diff --git a/libs/storage/Tsavorite/cs/benchmark/BDN-Tsavorite.Benchmark/BenchmarkDotNetTestsApp.cs b/libs/storage/Tsavorite/cs/benchmark/BDN-Tsavorite.Benchmark/BenchmarkDotNetTestsApp.cs index 85baa0ed05..ef5777c186 100644 --- a/libs/storage/Tsavorite/cs/benchmark/BDN-Tsavorite.Benchmark/BenchmarkDotNetTestsApp.cs +++ b/libs/storage/Tsavorite/cs/benchmark/BDN-Tsavorite.Benchmark/BenchmarkDotNetTestsApp.cs @@ -11,6 +11,20 @@ public class BenchmarkDotNetTestsApp public static void Main(string[] args) { + // Check for debugging a test + if (args[0].ToLower() == "cursor") + { + var test = new IterationTests + { + FlushAndEvict = true + }; + test.SetupPopulatedStore(); + test.Cursor(); + test.TearDown(); + return; + } + + // Do regular invocation. 
BenchmarkSwitcher.FromAssembly(typeof(BenchmarkDotNetTestsApp).Assembly).Run(args); } } diff --git a/libs/storage/Tsavorite/cs/benchmark/BDN-Tsavorite.Benchmark/IterationTests.cs b/libs/storage/Tsavorite/cs/benchmark/BDN-Tsavorite.Benchmark/IterationTests.cs new file mode 100644 index 0000000000..519c32c55c --- /dev/null +++ b/libs/storage/Tsavorite/cs/benchmark/BDN-Tsavorite.Benchmark/IterationTests.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. + +using BenchmarkDotNet.Attributes; +using BenchmarkDotNet.Configs; +using Tsavorite.core; + +#pragma warning disable 0649 // Field 'field' is never assigned to, and will always have its default value 'value'; happens due to [Params(..)] +#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member +#pragma warning disable IDE0048 // Add parentheses for clarity +#pragma warning disable IDE0130 // Namespace does not match folder structure + +namespace BenchmarkDotNetTests +{ +#pragma warning disable IDE0065 // Misplaced using directive + using SpanByteStoreFunctions = StoreFunctions; + + [GroupBenchmarksBy(BenchmarkLogicalGroupRule.ByCategory, BenchmarkLogicalGroupRule.ByParams)] + public class IterationTests + { + const int NumRecords = 1_000_000; + + [Params(true, false)] + public bool FlushAndEvict; + + TsavoriteKV> store; + IDevice logDevice; + string logDirectory; + + void SetupStore() + { + logDirectory = BenchmarkDotNetTestsApp.TestDirectory; + var logFilename = Path.Combine(logDirectory, $"{nameof(IterationTests)}_{Guid.NewGuid()}.log"); + logDevice = Devices.CreateLogDevice(logFilename, preallocateFile: true, deleteOnClose: true, useIoCompletionPort: true); + + store = new(new() + { + IndexSize = 1L << 26, + LogDevice = logDevice + }, StoreFunctions.Create() + , (allocatorSettings, storeFunctions) => new(allocatorSettings, storeFunctions) + ); + } + + unsafe void PopulateStore() + { + using var session = 
store.NewSession>(new()); + var bContext = session.BasicContext; + + Span keyVec = stackalloc byte[sizeof(long)]; + var keySpanByte = SpanByte.FromPinnedSpan(keyVec); + + Span valueVec = stackalloc byte[sizeof(long)]; + var valueSpanByte = SpanByte.FromPinnedSpan(valueVec); + + for (long ii = 0; ii < NumRecords; ++ii) + { + *(long*)keySpanByte.ToPointer() = ii; + *(long*)valueSpanByte.ToPointer() = ii + NumRecords; + _ = bContext.Upsert(keySpanByte, valueSpanByte); + } + + if (FlushAndEvict) + store.Log.FlushAndEvict(wait: true); + } + + [GlobalSetup] + public void SetupPopulatedStore() + { + SetupStore(); + PopulateStore(); + } + + [GlobalCleanup] + public void TearDown() + { + store?.Dispose(); + store = null; + logDevice?.Dispose(); + logDevice = null; + try + { + Directory.Delete(logDirectory); + } + catch { } + } + + [BenchmarkCategory("Cursor"), Benchmark] + public void Cursor() + { + using var session = store.NewSession>(new()); + + var scanFunctions = new ScanFunctions(); + var cursor = 0L; + session.ScanCursor(ref cursor, long.MaxValue, scanFunctions); + if (scanFunctions.Count < NumRecords) + throw new ApplicationException($"Incomplete iteration; {scanFunctions.Count} of {NumRecords} records returned"); + } + + class ScanCounter + { + internal int count; + } + + internal struct ScanFunctions : IScanIteratorFunctions + { + private readonly ScanCounter counter; + + internal readonly int Count => counter.count; + + public ScanFunctions() => counter = new(); + + /// + public bool OnStart(long beginAddress, long endAddress) => true; + + /// + public bool SingleReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) + { + ++counter.count; + cursorRecordResult = CursorRecordResult.Accept; + return true; + } + + /// + public bool ConcurrentReader(ref SpanByte key, ref SpanByte value, RecordMetadata recordMetadata, long numberOfRecords, out CursorRecordResult cursorRecordResult) + 
=> SingleReader(ref key, ref value, recordMetadata, numberOfRecords, out cursorRecordResult); + + /// + public void OnStop(bool completed, long numberOfRecords) { } + + /// + public void OnException(Exception exception, long numberOfRecords) { } + } + } +} \ No newline at end of file diff --git a/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorBase.cs b/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorBase.cs index 83cf9073c1..2d9380a7dd 100644 --- a/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorBase.cs +++ b/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorBase.cs @@ -66,8 +66,8 @@ public abstract partial class AllocatorBaseHow many pages do we leave empty in the in-memory buffer (between 0 and BufferSize-1) private int emptyPageCount; - /// HeadOFfset lag address - internal long HeadOffsetLagAddress; + /// HeadAddress offset from tail (currently page-aligned) + internal long HeadAddressLagOffset; /// /// Number of or @@ -78,8 +78,8 @@ public abstract partial class AllocatorBaseLog mutable fraction protected readonly double LogMutableFraction; - /// ReadOnlyOffset lag (from tail) - protected long ReadOnlyLagAddress; + /// ReadOnlyAddress offset from tail (currently page-aligned) + protected long ReadOnlyAddressLagOffset; #endregion @@ -502,7 +502,7 @@ internal void WriteAsync(IntPtr alignedSourceAddress, ulong alignedDes } } - internal long GetReadOnlyLagAddress() => ReadOnlyLagAddress; + internal long GetReadOnlyAddressLagOffset() => ReadOnlyAddressLagOffset; protected readonly ILogger logger; @@ -577,8 +577,8 @@ private protected AllocatorBase(LogSettings settings, TStoreFunctions storeFunct if (SegmentSize < PageSize) throw new TsavoriteException($"Segment ({SegmentSize}) must be at least of page size ({PageSize})"); - if ((LogTotalSizeBits != 0) && (LogTotalSizeBytes < PageSize)) - throw new TsavoriteException($"Memory size ({LogTotalSizeBytes}) must be configured to be either 1 (i.e., 0 bits) or at least page size ({PageSize})"); + if 
((LogTotalSizeBits != 0) && (LogTotalSizeBytes < PageSize * 2)) + throw new TsavoriteException($"Memory size ({LogTotalSizeBytes}) must be at least twice the page size ({PageSize})"); // Readonlymode has MemorySizeBits 0 => skip the check if (settings.MemorySizeBits > 0 && settings.MinEmptyPageCount > MaxEmptyPageCount) @@ -714,6 +714,9 @@ protected void Initialize(long firstValidAddress) /// Minimum number of empty pages in circular buffer to be maintained to account for non-power-of-two size public int MinEmptyPageCount; + /// Maximum memory size in bytes + public long MaxMemorySizeBytes => (BufferSize - MinEmptyPageCount) * (long)PageSize; + /// How many pages do we leave empty in the in-memory buffer (between 0 and BufferSize-1) public int EmptyPageCount { @@ -733,9 +736,9 @@ public int EmptyPageCount emptyPageCount = value; headOffsetLagSize -= emptyPageCount; - // Lag addresses are the number of pages "behind" TailPageOffset (the tail in the circular buffer). - ReadOnlyLagAddress = (long)(LogMutableFraction * headOffsetLagSize) << LogPageSizeBits; - HeadOffsetLagAddress = (long)headOffsetLagSize << LogPageSizeBits; + // Address lag offsets correspond to the number of pages "behind" TailPageOffset (the tail in the circular buffer). + ReadOnlyAddressLagOffset = (long)(LogMutableFraction * headOffsetLagSize) << LogPageSizeBits; + HeadAddressLagOffset = (long)headOffsetLagSize << LogPageSizeBits; } // Force eviction now if empty page count has increased @@ -781,10 +784,21 @@ protected void IncrementAllocatedPageCount() public long GetTailAddress() { var local = TailPageOffset; - if (local.Offset >= PageSize) + + // Handle corner cases during page overflow + // The while loop is guaranteed to terminate because HandlePageOverflow + // ensures that it fixes the unstable TailPageOffset immediately. 
+ while (local.Offset >= PageSize) { - local.Page++; - local.Offset = 0; + if (local.Offset == PageSize) + { + local.Page++; + local.Offset = 0; + break; + } + // Offset is being adjusted by overflow thread, spin-wait + Thread.Yield(); + local = TailPageOffset; } return ((long)local.Page << LogPageSizeBits) | (uint)local.Offset; } @@ -810,7 +824,8 @@ public long GetTailAddress() /// Get sector size for main hlog device public int GetDeviceSectorSize() => sectorSize; - void AllocatePagesWithException(int pageIndex, PageOffset localTailPageOffset) + [MethodImpl(MethodImplOptions.NoInlining)] + void AllocatePagesWithException(int pageIndex, PageOffset localTailPageOffset, int numSlots) { try { @@ -824,26 +839,152 @@ void AllocatePagesWithException(int pageIndex, PageOffset localTailPageOffset) } catch { - localTailPageOffset.Offset = PageSize; + // Reset to previous tail + localTailPageOffset.PageAndOffset -= numSlots; Interlocked.Exchange(ref TailPageOffset.PageAndOffset, localTailPageOffset.PageAndOffset); throw; } } + /// + /// Throw Tsavorite exception with message. We use a method wrapper so that + /// the caller method can execute inlined. + /// + /// + /// + [MethodImpl(MethodImplOptions.NoInlining)] + static void ThrowTsavoriteException(string message) + => throw new TsavoriteException(message); + + /// + /// Whether we need to shift addresses when turning the page. 
+ /// + /// The page we are turning to + /// Local copy of PageOffset (includes the addition of numSlots) + /// Size of new allocation + /// + bool NeedToShiftAddress(long pageIndex, PageOffset localTailPageOffset, int numSlots) + { + var tailAddress = (((long)localTailPageOffset.Page) << LogPageSizeBits) | ((long)(localTailPageOffset.Offset - numSlots)); + var shiftAddress = pageIndex << LogPageSizeBits; + + // Check whether we need to shift ROA + var desiredReadOnlyAddress = shiftAddress - ReadOnlyAddressLagOffset; + if (desiredReadOnlyAddress > tailAddress) + desiredReadOnlyAddress = tailAddress; + if (desiredReadOnlyAddress > ReadOnlyAddress) + return true; + + // Check whether we need to shift HA + var desiredHeadAddress = shiftAddress - HeadAddressLagOffset; + var currentFlushedUntilAddress = FlushedUntilAddress; + if (desiredHeadAddress > currentFlushedUntilAddress) + desiredHeadAddress = currentFlushedUntilAddress; + if (desiredHeadAddress > tailAddress) + desiredHeadAddress = tailAddress; + if (desiredHeadAddress > HeadAddress) + return true; + + return false; + } + + /// + /// Shift log addresses when turning the page. 
+ /// + /// The page we are turning to + void IssueShiftAddress(long pageIndex) + { + // Issue the shift of address + var shiftAddress = pageIndex << LogPageSizeBits; + var tailAddress = GetTailAddress(); + + long desiredReadOnlyAddress = shiftAddress - ReadOnlyAddressLagOffset; + if (desiredReadOnlyAddress > tailAddress) + desiredReadOnlyAddress = tailAddress; + ShiftReadOnlyAddress(desiredReadOnlyAddress); + + long desiredHeadAddress = shiftAddress - HeadAddressLagOffset; + if (desiredHeadAddress > tailAddress) + desiredHeadAddress = tailAddress; + ShiftHeadAddress(desiredHeadAddress); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + long HandlePageOverflow(ref PageOffset localTailPageOffset, int numSlots) + { + int pageIndex = localTailPageOffset.Page + 1; + + // This thread is trying to allocate at an offset past where one or more previous threads + // already overflowed; exit and allow the first overflow thread to proceed. Do not try to remove + // the update to TailPageOffset that was done by this thread; that will be overwritten when + // the first overflow thread finally completes and updates TailPageOffset. + if (localTailPageOffset.Offset - numSlots > PageSize) + { + if (NeedToWait(pageIndex)) + return 0; // RETRY_LATER + return -1; // RETRY_NOW + } + + // The single thread that "owns" the page-increment proceeds below. This is the thread for which: + // 1. Old image of offset (pre-Interlocked.Increment) is <= PageSize, and + // 2. New image of offset (post-Interlocked.Increment) is > PageSize. + if (NeedToWait(pageIndex)) + { + // Reset to previous tail so that next attempt can retry + localTailPageOffset.PageAndOffset -= numSlots; + Interlocked.Exchange(ref TailPageOffset.PageAndOffset, localTailPageOffset.PageAndOffset); + + // Shift only after TailPageOffset is reset to a valid state + IssueShiftAddress(pageIndex); + + return 0; // RETRY_LATER + } + + // We next verify that: + // 1. The next page (pageIndex) is ready to use (i.e., closed) + // 2. 
We have issued any necessary address shifting at the page-turn boundary. + // If either cannot be verified, we can ask the caller to retry now (immediately), because it is + // an ephemeral state. + if (CannotAllocate(pageIndex) || NeedToShiftAddress(pageIndex, localTailPageOffset, numSlots)) + { + // Reset to previous tail so that next attempt can retry + localTailPageOffset.PageAndOffset -= numSlots; + Interlocked.Exchange(ref TailPageOffset.PageAndOffset, localTailPageOffset.PageAndOffset); + + // Shift only after TailPageOffset is reset to a valid state + IssueShiftAddress(pageIndex); + + return -1; // RETRY_NOW + } + + // Allocate next page and set new tail + if (!_wrapper.IsAllocated(pageIndex % BufferSize) || !_wrapper.IsAllocated((pageIndex + 1) % BufferSize)) + AllocatePagesWithException(pageIndex, localTailPageOffset, numSlots); + + localTailPageOffset.Page++; + localTailPageOffset.Offset = numSlots; + TailPageOffset = localTailPageOffset; + + // At this point, the slot is allocated and we are not allowed to refresh epochs any longer. 
+ + // Offset is zero, for the first allocation on the new page + return ((long)localTailPageOffset.Page) << LogPageSizeBits; + } + /// Try allocate, no thread spinning allowed /// Number of slots to allocate /// The allocated logical address, or 0 in case of inability to allocate [MethodImpl(MethodImplOptions.AggressiveInlining)] - public long TryAllocate(int numSlots = 1) + long TryAllocate(int numSlots = 1) { if (numSlots > PageSize) - throw new TsavoriteException("Entry does not fit on page"); + ThrowTsavoriteException("Entry does not fit on page"); PageOffset localTailPageOffset = default; localTailPageOffset.PageAndOffset = TailPageOffset.PageAndOffset; // Necessary to check because threads keep retrying and we do not - // want to overflow offset more than once per thread + // want to overflow the offset more than once per thread if (localTailPageOffset.Offset > PageSize) { if (NeedToWait(localTailPageOffset.Page + 1)) @@ -851,63 +992,23 @@ public long TryAllocate(int numSlots = 1) return -1; // RETRY_NOW } - // Determine insertion index. + // Determine insertion index. Note that this forms a kind of "lock"; after the first thread does this, other threads that do + // it will see that another thread got there first because the subsequent "back up by numSlots" will still be past PageSize, + // so they will exit and RETRY in HandlePageOverflow; the first thread "owns" the overflow operation and must stabilize it. localTailPageOffset.PageAndOffset = Interlocked.Add(ref TailPageOffset.PageAndOffset, numSlots); - int page = localTailPageOffset.Page; - int offset = localTailPageOffset.Offset - numSlots; - - #region HANDLE PAGE OVERFLOW + // Slow path when we reach the end of a page. 
if (localTailPageOffset.Offset > PageSize) { - int pageIndex = localTailPageOffset.Page + 1; - - // All overflow threads try to shift addresses - long shiftAddress = ((long)pageIndex) << LogPageSizeBits; - PageAlignedShiftReadOnlyAddress(shiftAddress); - PageAlignedShiftHeadAddress(shiftAddress); - - // This thread is trying to allocate at an offset past where one or more previous threads - // already overflowed; exit and allow the first overflow thread to proceed - if (offset > PageSize) - { - if (NeedToWait(pageIndex)) - return 0; // RETRY_LATER - return -1; // RETRY_NOW - } - - if (NeedToWait(pageIndex)) - { - // Reset to end of page so that next attempt can retry - localTailPageOffset.Offset = PageSize; - Interlocked.Exchange(ref TailPageOffset.PageAndOffset, localTailPageOffset.PageAndOffset); - return 0; // RETRY_LATER - } - - // The thread that "makes" the offset incorrect should allocate next page and set new tail - if (CannotAllocate(pageIndex)) - { - // Reset to end of page so that next attempt can retry - localTailPageOffset.Offset = PageSize; - Interlocked.Exchange(ref TailPageOffset.PageAndOffset, localTailPageOffset.PageAndOffset); - return -1; // RETRY_NOW - } - - if (!_wrapper.IsAllocated(pageIndex % BufferSize) || !_wrapper.IsAllocated((pageIndex + 1) % BufferSize)) - AllocatePagesWithException(pageIndex, localTailPageOffset); - - localTailPageOffset.Page++; - localTailPageOffset.Offset = numSlots; - TailPageOffset = localTailPageOffset; - page++; - offset = 0; + // Note that TailPageOffset is now unstable -- there may be a GetTailAddress call spinning for + // it to stabilize. Therefore, HandlePageOverflow needs to stabilize TailPageOffset immediately, + // before performing any epoch bumps or system calls. 
+ return HandlePageOverflow(ref localTailPageOffset, numSlots); } - #endregion - - return (((long)page) << LogPageSizeBits) | ((long)offset); + return (((long)localTailPageOffset.Page) << LogPageSizeBits) | ((long)(localTailPageOffset.Offset - numSlots)); } - /// Try allocate, spin for RETRY_NOW case + /// Try allocate, spin for RETRY_NOW (logicalAddress is less than 0) case /// Number of slots to allocate /// The allocated logical address, or 0 in case of inability to allocate [MethodImpl(MethodImplOptions.AggressiveInlining)] @@ -915,7 +1016,11 @@ public long TryAllocateRetryNow(int numSlots = 1) { long logicalAddress; while ((logicalAddress = TryAllocate(numSlots)) < 0) + { + _ = TryComplete(); epoch.ProtectAndDrain(); + Thread.Yield(); + } return logicalAddress; } @@ -978,27 +1083,30 @@ public void ShiftBeginAddress(long newBeginAddress, bool truncateLog, bool noFlu var flushEvent = FlushEvent; _ = ShiftReadOnlyAddress(newBeginAddress, noFlush); - // Wait for flush to complete - var spins = 0; - while (true) + if (!noFlush) { - if (FlushedUntilAddress >= newBeginAddress) - break; - if (++spins < Constants.kFlushSpinCount) - { - _ = Thread.Yield(); - continue; - } - try + // Wait for flush to complete + var spins = 0; + while (true) { - epoch.Suspend(); - flushEvent.Wait(); - } - finally - { - epoch.Resume(); + if (FlushedUntilAddress >= newBeginAddress) + break; + if (++spins < Constants.kFlushSpinCount) + { + _ = Thread.Yield(); + continue; + } + try + { + epoch.Suspend(); + flushEvent.Wait(); + } + finally + { + epoch.Resume(); + } + flushEvent = FlushEvent; } - flushEvent = FlushEvent; } // Then shift head address @@ -1145,7 +1253,7 @@ private void DebugPrintAddresses() private void PageAlignedShiftReadOnlyAddress(long currentTailAddress) { long pageAlignedTailAddress = currentTailAddress & ~PageSizeMask; - long desiredReadOnlyAddress = pageAlignedTailAddress - ReadOnlyLagAddress; + long desiredReadOnlyAddress = pageAlignedTailAddress - 
ReadOnlyAddressLagOffset; if (Utility.MonotonicUpdate(ref ReadOnlyAddress, desiredReadOnlyAddress, out _)) { // Debug.WriteLine("Allocate: Moving read-only offset from {0:X} to {1:X}", oldReadOnlyAddress, desiredReadOnlyAddress); @@ -1160,7 +1268,20 @@ private void PageAlignedShiftReadOnlyAddress(long currentTailAddress) /// [MethodImpl(MethodImplOptions.AggressiveInlining)] private void PageAlignedShiftHeadAddress(long currentTailAddress) - => ShiftHeadAddress((currentTailAddress & ~PageSizeMask) - HeadOffsetLagAddress); + { + var desiredHeadAddress = (currentTailAddress & ~PageSizeMask) - HeadAddressLagOffset; + + // Obtain local values of variables that can change + var currentFlushedUntilAddress = FlushedUntilAddress; + if (desiredHeadAddress > currentFlushedUntilAddress) + desiredHeadAddress = currentFlushedUntilAddress & ~PageSizeMask; + + if (Utility.MonotonicUpdate(ref HeadAddress, desiredHeadAddress, out _)) + { + // Debug.WriteLine("Allocate: Moving head offset from {0:X} to {1:X}", oldHeadAddress, newHeadAddress); + epoch.BumpCurrentEpoch(() => OnPagesClosed(desiredHeadAddress)); + } + } /// /// Tries to shift head address to specified value @@ -1168,13 +1289,17 @@ private void PageAlignedShiftHeadAddress(long currentTailAddress) /// public long ShiftHeadAddress(long desiredHeadAddress) { - //obtain local values of variables that can change + // Obtain local values of variables that can change long currentFlushedUntilAddress = FlushedUntilAddress; long newHeadAddress = desiredHeadAddress; - if (currentFlushedUntilAddress < newHeadAddress) + if (newHeadAddress > currentFlushedUntilAddress) newHeadAddress = currentFlushedUntilAddress; + if (newHeadAddress % (1 << LogPageSizeBits) != 0) + { + + } if (Utility.MonotonicUpdate(ref HeadAddress, newHeadAddress, out _)) { // Debug.WriteLine("Allocate: Moving head offset from {0:X} to {1:X}", oldHeadAddress, newHeadAddress); @@ -1444,12 +1569,13 @@ public void AsyncFlushPages(long fromAddress, long untilAddress, 
bool noFlush = asyncResult.fromAddress = fromAddress; } + bool skip = false; if (asyncResult.untilAddress <= BeginAddress) { // Short circuit as no flush needed _ = Utility.MonotonicUpdate(ref PageStatusIndicator[flushPage % BufferSize].LastFlushedUntilAddress, BeginAddress, out _); ShiftFlushedUntilAddress(); - continue; + skip = true; } if (IsNullDevice || noFlush) @@ -1457,9 +1583,11 @@ public void AsyncFlushPages(long fromAddress, long untilAddress, bool noFlush = // Short circuit as no flush needed _ = Utility.MonotonicUpdate(ref PageStatusIndicator[flushPage % BufferSize].LastFlushedUntilAddress, asyncResult.untilAddress, out _); ShiftFlushedUntilAddress(); - continue; + skip = true; } + if (skip) continue; + // Partial page starting point, need to wait until the // ongoing adjacent flush is completed to ensure correctness if (GetOffsetInPage(asyncResult.fromAddress) > 0) diff --git a/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorScan.cs b/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorScan.cs index 5496953d02..4ab5247f20 100644 --- a/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorScan.cs +++ b/libs/storage/Tsavorite/cs/src/core/Allocator/AllocatorScan.cs @@ -8,22 +8,6 @@ namespace Tsavorite.core { - internal sealed class ScanCursorState - { - internal IScanIteratorFunctions functions; - internal long acceptedCount; // Number of records pushed to and accepted by the caller - internal bool endBatch; // End the batch (but return a valid cursor for the next batch, as of "count" records had been returned) - internal bool stop; // Stop the operation (as if all records in the db had been returned) - - internal void Initialize(IScanIteratorFunctions scanIteratorFunctions) - { - functions = scanIteratorFunctions; - acceptedCount = 0; - endBatch = false; - stop = false; - } - } - public abstract partial class AllocatorBase : IDisposable where TStoreFunctions : IStoreFunctions where TAllocator : IAllocator @@ -205,7 +189,8 @@ internal unsafe bool 
GetFromDiskAndPushToReader(ref TKey key, re internal abstract bool ScanCursor(TsavoriteKV store, ScanCursorState scanCursorState, ref long cursor, long count, TScanFunctions scanFunctions, long endAddress, bool validateCursor) where TScanFunctions : IScanIteratorFunctions; - private protected bool ScanLookup(TsavoriteKV store, ScanCursorState scanCursorState, ref long cursor, long count, TScanFunctions scanFunctions, TScanIterator iter, bool validateCursor) + private protected bool ScanLookup(TsavoriteKV store, + ScanCursorState scanCursorState, ref long cursor, long count, TScanFunctions scanFunctions, TScanIterator iter, bool validateCursor) where TScanFunctions : IScanIteratorFunctions where TScanIterator : ITsavoriteScanIterator, IPushScanIterator { @@ -229,7 +214,7 @@ private protected bool ScanLookup(TSessionFunctionsWrapper sessionFunctions, ScanCursorState scanCursorState, RecordInfo recordInfo, ref TKey key, ref TValue value, long minAddress) + internal Status ConditionalScanPush(TSessionFunctionsWrapper sessionFunctions, ScanCursorState scanCursorState, RecordInfo recordInfo, + ref TKey key, ref TValue value, long currentAddress, long minAddress) where TSessionFunctionsWrapper : ISessionFunctionsWrapper { Debug.Assert(epoch.ThisInstanceProtected(), "This is called only from ScanLookup so the epoch should be protected"); @@ -273,7 +259,7 @@ internal Status ConditionalScanPush(sessionFunctions, ref key, ref stackCtx, minAddress, out internalStatus, out needIO)) + if (sessionFunctions.Store.TryFindRecordInMainLogForConditionalOperation(sessionFunctions, ref key, ref stackCtx, currentAddress, minAddress, out internalStatus, out needIO)) return Status.CreateFound(); } while (sessionFunctions.Store.HandleImmediateNonPendingRetryStatus(internalStatus, sessionFunctions)); diff --git a/libs/storage/Tsavorite/cs/src/core/Allocator/ScanCursorState.cs b/libs/storage/Tsavorite/cs/src/core/Allocator/ScanCursorState.cs new file mode 100644 index 0000000000..ce53b1474a 
--- /dev/null +++ b/libs/storage/Tsavorite/cs/src/core/Allocator/ScanCursorState.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +namespace Tsavorite.core +{ + internal sealed class ScanCursorState + { + internal IScanIteratorFunctions functions; + internal long acceptedCount; // Number of records pushed to and accepted by the caller + internal bool endBatch; // End the batch (but return a valid cursor for the next batch, as of "count" records had been returned) + internal bool stop; // Stop the operation (as if all records in the db had been returned) + + internal void Initialize(IScanIteratorFunctions scanIteratorFunctions) + { + functions = scanIteratorFunctions; + acceptedCount = 0; + endBatch = false; + stop = false; + } + } +} \ No newline at end of file diff --git a/libs/storage/Tsavorite/cs/src/core/Allocator/WorkQueueLIFO.cs b/libs/storage/Tsavorite/cs/src/core/Allocator/WorkQueueLIFO.cs index fc35ca739c..19c4cb5d09 100644 --- a/libs/storage/Tsavorite/cs/src/core/Allocator/WorkQueueLIFO.cs +++ b/libs/storage/Tsavorite/cs/src/core/Allocator/WorkQueueLIFO.cs @@ -3,95 +3,67 @@ using System; using System.Collections.Concurrent; -using System.Threading; using System.Threading.Tasks; namespace Tsavorite.core { /// - /// Shared work queue that ensures one worker at any given time. Uses LIFO ordering of work. + /// Shared work queue with a single work processor task loop. Uses LIFO ordering of work. 
/// /// internal sealed class WorkQueueLIFO : IDisposable { - const int kMaxQueueSize = 1 << 30; - readonly ConcurrentStack _queue; - readonly Action _work; - private int _count; - private bool _disposed; + readonly ConcurrentStack stack; + readonly Action work; + readonly SingleWaiterAutoResetEvent onWork; + readonly Task processQueue; + private bool disposed; public WorkQueueLIFO(Action work) { - _queue = new ConcurrentStack(); - _work = work; - _count = 0; - _disposed = false; + stack = new ConcurrentStack(); + this.work = work; + onWork = new() + { + RunContinuationsAsynchronously = true + }; + processQueue = Task.Run(ProcessQueue); } public void Dispose() { - _disposed = true; - // All future enqueue requests will no longer perform work after _disposed is set to true. - while (_count != 0) - Thread.Yield(); - // After this point, any previous work must have completed. Even if another enqueue request manipulates the - // count field, they are guaranteed to see disposed and not enqueue any actual work. + disposed = true; + onWork.Signal(); } /// - /// Enqueue work item, take ownership of draining the work queue - /// if needed + /// Add work item /// - /// Work to enqueue - /// Process work as separate task - /// whether the enqueue is successful. 
Enqueuing into a disposed WorkQueue will fail and the task will not be performed> - public bool EnqueueAndTryWork(T work, bool asTask) + /// Work item + /// Whether the add is successful> + public void AddWorkItem(T workItem) { - Interlocked.Increment(ref _count); - if (_disposed) - { - // Remove self from count in case Dispose() is actively waiting for completion - Interlocked.Decrement(ref _count); - return false; - } - - _queue.Push(work); - - // Try to take over work queue processing if needed - while (true) - { - int count = _count; - if (count >= kMaxQueueSize) return true; - if (Interlocked.CompareExchange(ref _count, count + kMaxQueueSize, count) == count) - break; - } - - if (asTask) - _ = Task.Run(ProcessQueue); - else - ProcessQueue(); - return true; + // Add the work item + stack.Push(workItem); + // Signal the processing logic to check for work + onWork.Signal(); } - private void ProcessQueue() + private async Task ProcessQueue() { // Process items in work queue - while (true) + while (!disposed) { - while (_queue.TryPop(out var workItem)) + while (stack.TryPop(out var workItem)) { try { - _work(workItem); + work(workItem); } catch { } - Interlocked.Decrement(ref _count); + if (disposed) return; } - - int count = _count; - if (count != kMaxQueueSize) continue; - if (Interlocked.CompareExchange(ref _count, 0, count) == count) - break; + await onWork.WaitAsync().ConfigureAwait(false); } } } diff --git a/libs/storage/Tsavorite/cs/src/core/ClientSession/BasicContext.cs b/libs/storage/Tsavorite/cs/src/core/ClientSession/BasicContext.cs index 8a8f546424..8f610740dd 100644 --- a/libs/storage/Tsavorite/cs/src/core/ClientSession/BasicContext.cs +++ b/libs/storage/Tsavorite/cs/src/core/ClientSession/BasicContext.cs @@ -429,15 +429,16 @@ public void Refresh() /// /// /// + /// LogicalAddress of the record to be copied /// Lower-bound address (addresses are searched from tail (high) to head (low); do not search for "future records" earlier than this) 
[MethodImpl(MethodImplOptions.AggressiveInlining)] - internal Status CompactionCopyToTail(ref TKey key, ref TInput input, ref TValue value, ref TOutput output, long untilAddress) + internal Status CompactionCopyToTail(ref TKey key, ref TInput input, ref TValue value, ref TOutput output, long currentAddress, long untilAddress) { UnsafeResumeThread(); try { return store.CompactionConditionalCopyToTail, TStoreFunctions, TAllocator>>( - sessionFunctions, ref key, ref input, ref value, ref output, untilAddress); + sessionFunctions, ref key, ref input, ref value, ref output, currentAddress, untilAddress); } finally { @@ -452,15 +453,16 @@ internal Status CompactionCopyToTail(ref TKey key, ref TInput input, ref TValue /// /// /// + /// LogicalAddress of the record to be copied /// Lower-bound address (addresses are searched from tail (high) to head (low); do not search for "future records" earlier than this) [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal Status ConditionalScanPush(ScanCursorState scanCursorState, RecordInfo recordInfo, ref TKey key, ref TValue value, long untilAddress) + internal Status ConditionalScanPush(ScanCursorState scanCursorState, RecordInfo recordInfo, ref TKey key, ref TValue value, long currentAddress, long untilAddress) { UnsafeResumeThread(); try { return store.hlogBase.ConditionalScanPush, TStoreFunctions, TAllocator>>( - sessionFunctions, scanCursorState, recordInfo, ref key, ref value, untilAddress); + sessionFunctions, scanCursorState, recordInfo, ref key, ref value, currentAddress, untilAddress); } finally { diff --git a/libs/storage/Tsavorite/cs/src/core/ClientSession/ClientSession.cs b/libs/storage/Tsavorite/cs/src/core/ClientSession/ClientSession.cs index 528c0bf903..adc297ba11 100644 --- a/libs/storage/Tsavorite/cs/src/core/ClientSession/ClientSession.cs +++ b/libs/storage/Tsavorite/cs/src/core/ClientSession/ClientSession.cs @@ -467,7 +467,7 @@ public ITsavoriteScanIterator Iterate(long untilAddress = -1) => 
store.Iterate(functions, untilAddress); /// - /// Push iteration of all (distinct) live key-values stored in Tsavorite + /// Push iteration of all (distinct) live key-values stored in Tsavorite, using a temporary TsavoriteKV to ensure uniqueness /// /// Functions receiving pushed records /// Report records until this address (tail by default) @@ -476,6 +476,21 @@ public bool Iterate(ref TScanFunctions scanFunctions, long until where TScanFunctions : IScanIteratorFunctions => store.Iterate(functions, ref scanFunctions, untilAddress); + /// + /// Push iteration of all (distinct) live key-values stored in Tsavorite, using a lookup strategy to ensure uniqueness + /// + /// Functions receiving pushed records + /// Report records until this address (tail by default) + /// True if Iteration completed; false if Iteration ended early due to one of the TScanIterator reader functions returning false + public bool IterateLookup(ref TScanFunctions scanFunctions, long untilAddress = -1) + where TScanFunctions : IScanIteratorFunctions + { + if (untilAddress == -1) + untilAddress = store.Log.TailAddress; + var cursor = 0L; + return ScanCursor(ref cursor, count: long.MaxValue, scanFunctions, endAddress: untilAddress); + } + /// /// Push-scan the log from (which should be a valid address) and push up to records /// to the caller via for each Key that is not found at a higher address. diff --git a/libs/storage/Tsavorite/cs/src/core/Compaction/TsavoriteCompaction.cs b/libs/storage/Tsavorite/cs/src/core/Compaction/TsavoriteCompaction.cs index c2da08be48..4bef9caa87 100644 --- a/libs/storage/Tsavorite/cs/src/core/Compaction/TsavoriteCompaction.cs +++ b/libs/storage/Tsavorite/cs/src/core/Compaction/TsavoriteCompaction.cs @@ -54,7 +54,7 @@ private long CompactLookup 256) { storebContext.CompletePending(wait: true); @@ -135,7 +135,7 @@ private long CompactScan= untilAddress (scan boundary), we are safe to copy the old record // to the tail. 
We don't know the actualAddress of the key in the main kv, but we it will not be below untilAddress. - var status = storebContext.CompactionCopyToTail(ref iter3.GetKey(), ref input, ref iter3.GetValue(), ref output, untilAddress - 1); + var status = storebContext.CompactionCopyToTail(ref iter3.GetKey(), ref input, ref iter3.GetValue(), ref output, iter3.CurrentAddress, untilAddress - 1); if (status.IsPending && ++numPending > 256) { storebContext.CompletePending(wait: true); diff --git a/libs/storage/Tsavorite/cs/src/core/Index/CheckpointManagement/DeviceLogCommitCheckpointManager.cs b/libs/storage/Tsavorite/cs/src/core/Index/CheckpointManagement/DeviceLogCommitCheckpointManager.cs index 017820e73c..c7f0528e67 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/CheckpointManagement/DeviceLogCommitCheckpointManager.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/CheckpointManagement/DeviceLogCommitCheckpointManager.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; +using System.ComponentModel; using System.IO; using System.Linq; using System.Threading; @@ -426,7 +427,8 @@ private unsafe void IOCallback(uint errorCode, uint numBytes, object context) { if (errorCode != 0) { - logger?.LogError("OverlappedStream GetQueuedCompletionStatus error: {0}", errorCode); + var errorMessage = new Win32Exception((int)errorCode).Message; + logger.LogError("[DeviceLogCheckpointManager] OverlappedStream GetQueuedCompletionStatus error: {errorCode} msg: {errorMessage}", errorCode, errorMessage); } semaphore.Release(); } diff --git a/libs/storage/Tsavorite/cs/src/core/Index/Common/LogSizeTracker.cs b/libs/storage/Tsavorite/cs/src/core/Index/Common/LogSizeTracker.cs index 0695ab3b1c..f1994158db 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/Common/LogSizeTracker.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/Common/LogSizeTracker.cs @@ -197,11 +197,11 @@ async Task ResizerTask(CancellationToken token) /// private void ResizeIfNeeded(CancellationToken 
token) { - // Include memory size from the log (logAccessor.MemorySizeBytes) + heap size (logSize.Total) to check utilization - if (logSize.Total + logAccessor.MemorySizeBytes > highTargetSize) + // Monitor the heap size + if (logSize.Total > highTargetSize) { - logger?.LogDebug("Heap size {totalLogSize} + log {MemorySizeBytes} > target {highTargetSize}. Alloc: {AllocatedPageCount} EPC: {EmptyPageCount}", logSize.Total, logAccessor.MemorySizeBytes, highTargetSize, logAccessor.AllocatedPageCount, logAccessor.EmptyPageCount); - while (logSize.Total + logAccessor.MemorySizeBytes > highTargetSize && + logger?.LogDebug("Heap size {totalLogSize} > target {highTargetSize}. Alloc: {AllocatedPageCount} EPC: {EmptyPageCount}", logSize.Total, highTargetSize, logAccessor.AllocatedPageCount, logAccessor.EmptyPageCount); + while (logSize.Total > highTargetSize && logAccessor.EmptyPageCount < logAccessor.MaxEmptyPageCount) { token.ThrowIfCancellationRequested(); @@ -216,10 +216,10 @@ private void ResizeIfNeeded(CancellationToken token) logger?.LogDebug("Increasing empty page count to {EmptyPageCount}", logAccessor.EmptyPageCount); } } - else if (logSize.Total + logAccessor.MemorySizeBytes < lowTargetSize) + else if (logSize.Total < lowTargetSize) { - logger?.LogDebug("Heap size {totalLogSize} + log {MemorySizeBytes} < target {lowTargetSize}. Alloc: {AllocatedPageCount} EPC: {EmptyPageCount}", logSize.Total, logAccessor.MemorySizeBytes, lowTargetSize, logAccessor.AllocatedPageCount, logAccessor.EmptyPageCount); - while (logSize.Total + logAccessor.MemorySizeBytes < lowTargetSize && + logger?.LogDebug("Heap size {totalLogSize} < target {lowTargetSize}. 
Alloc: {AllocatedPageCount} EPC: {EmptyPageCount}", logSize.Total, lowTargetSize, logAccessor.AllocatedPageCount, logAccessor.EmptyPageCount); + while (logSize.Total < lowTargetSize && logAccessor.EmptyPageCount > logAccessor.MinEmptyPageCount) { token.ThrowIfCancellationRequested(); diff --git a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/BlockAllocate.cs b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/BlockAllocate.cs index 6fc1b38727..9b0173c6ab 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/BlockAllocate.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/BlockAllocate.cs @@ -20,7 +20,7 @@ private static bool TryBlockAllocate( out OperationStatus internalStatus) { pendingContext.flushEvent = allocator.FlushEvent; - logicalAddress = allocator.TryAllocate(recordSize); + logicalAddress = allocator.TryAllocateRetryNow(recordSize); if (logicalAddress > 0) { pendingContext.flushEvent = default; @@ -28,24 +28,21 @@ private static bool TryBlockAllocate( return true; } - if (logicalAddress == 0) - { - // We expect flushEvent to be signaled. - internalStatus = OperationStatus.ALLOCATE_FAILED; - return false; - } - - // logicalAddress is < 0 so we do not expect flushEvent to be signaled; return RETRY_LATER to refresh the epoch. - pendingContext.flushEvent = default; - allocator.TryComplete(); - internalStatus = OperationStatus.RETRY_LATER; + // logicalAddress less than 0 (RETRY_NOW) should already have been handled + Debug.Assert(logicalAddress == 0); + // We expect flushEvent to be signaled. + internalStatus = OperationStatus.ALLOCATE_FAILED; return false; } + /// Options for TryAllocateRecord. internal struct AllocateOptions { + /// If true, use the non-revivification recycling of records that failed to CAS and are carried in PendingContext through RETRY. 
internal bool Recycle; - internal bool IgnoreHeiAddress; + + /// If true, the source record is elidable so we can try to elide from the tag chain (and transfer it to the FreeList if we're doing Revivification). + internal bool ElideSourceRecord; }; [MethodImpl(MethodImplOptions.AggressiveInlining)] @@ -64,7 +61,7 @@ bool TryAllocateRecord(TSes return true; if (RevivificationManager.UseFreeRecordPool) { - if (!options.IgnoreHeiAddress && stackCtx.hei.Address >= minMutableAddress) + if (!options.ElideSourceRecord && stackCtx.hei.Address >= minMutableAddress) minRevivAddress = stackCtx.hei.Address; if (sessionFunctions.Ctx.IsInV1) { diff --git a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ConditionalCopyToTail.cs b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ConditionalCopyToTail.cs index 525f6dc6f7..04bf1aeff3 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ConditionalCopyToTail.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ConditionalCopyToTail.cs @@ -70,7 +70,7 @@ private OperationStatus ConditionalCopyToTail(sessionFunctions, ref key, ref stackCtx2, minAddress, out status, out needIO)) + if (TryFindRecordInMainLogForConditionalOperation(sessionFunctions, ref key, ref stackCtx2, stackCtx.recSrc.LogicalAddress, minAddress, out status, out needIO)) return OperationStatus.SUCCESS; } while (HandleImmediateNonPendingRetryStatus(status, sessionFunctions)); @@ -90,7 +90,7 @@ private OperationStatus ConditionalCopyToTail(TSessionFunctionsWrapper sessionFunctions, ref TKey key, ref TInput input, ref TValue value, - ref TOutput output, long minAddress) + ref TOutput output, long currentAddress, long minAddress) where TSessionFunctionsWrapper : ISessionFunctionsWrapper { Debug.Assert(epoch.ThisInstanceProtected(), "This is called only from Compaction so the epoch should be protected"); @@ -101,7 +101,7 @@ internal Status CompactionConditionalCopyToTail(sessionFunctions, ref 
key, ref stackCtx, minAddress, out status, out needIO)) + if (TryFindRecordInMainLogForConditionalOperation(sessionFunctions, ref key, ref stackCtx, currentAddress, minAddress, out status, out needIO)) return Status.CreateFound(); } while (sessionFunctions.Store.HandleImmediateNonPendingRetryStatus(status, sessionFunctions)); diff --git a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ContinuePending.cs b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ContinuePending.cs index 5af24c5a87..a1c2a547d8 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ContinuePending.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/ContinuePending.cs @@ -293,7 +293,7 @@ internal OperationStatus ContinuePendingConditionalCopyToTail(sessionFunctions, ref key, ref stackCtx, minAddress, out internalStatus, out bool needIO)) + if (TryFindRecordInMainLogForConditionalOperation(sessionFunctions, ref key, ref stackCtx, currentAddress: request.logicalAddress, minAddress, out internalStatus, out bool needIO)) return OperationStatus.SUCCESS; if (!OperationStatusUtils.IsRetry(internalStatus)) { @@ -344,7 +344,7 @@ internal OperationStatus ContinuePendingConditionalScanPush(sessionFunctions, pendingContext.scanCursorState, pendingContext.recordInfo, ref pendingContext.key.Get(), ref pendingContext.value.Get(), - minAddress: pendingContext.InitialLatestLogicalAddress + 1); + currentAddress: request.logicalAddress, minAddress: pendingContext.InitialLatestLogicalAddress + 1); // ConditionalScanPush has already called HandleOperationStatus, so return SUCCESS here. 
return OperationStatus.SUCCESS; diff --git a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/FindRecord.cs b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/FindRecord.cs index ab83c64b58..335197d956 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/FindRecord.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/FindRecord.cs @@ -49,41 +49,46 @@ internal bool TryFindRecordInMainLog(ref TKey key, ref OperationStackContext(TSessionFunctionsWrapper sessionFunctions, - ref TKey key, ref OperationStackContext stackCtx, long minAddress, out OperationStatus internalStatus, out bool needIO) + ref TKey key, ref OperationStackContext stackCtx, long currentAddress, long minAddress, out OperationStatus internalStatus, out bool needIO) where TSessionFunctionsWrapper : ISessionFunctionsWrapper { + if (!FindTag(ref stackCtx.hei)) + { + internalStatus = OperationStatus.NOTFOUND; + return needIO = false; + } + internalStatus = OperationStatus.SUCCESS; + if (!stackCtx.hei.IsReadCache) + { + // If the address in the HashBucketEntry is the current address, there'll be no record above it, so return false (not found). + // If there are no valid records in the HashBucketEntry (minAddress is inclusive), return false (not found). + if (stackCtx.hei.Address == currentAddress || stackCtx.hei.Address < minAddress || stackCtx.hei.Address < hlogBase.BeginAddress) + { + stackCtx.SetRecordSourceToHashEntry(hlogBase); + return needIO = false; + } + if (stackCtx.hei.Address < hlogBase.HeadAddress) + { + stackCtx.SetRecordSourceToHashEntry(hlogBase); + needIO = true; + return false; + } + } + if (RevivificationManager.UseFreeRecordPool) { // The TransientSLock here is necessary only for the tag chain to avoid record elision/revivification during traceback. 
- if (!FindTagAndTryTransientSLock(sessionFunctions, ref key, ref stackCtx, out internalStatus)) + if (!TryTransientSLock(sessionFunctions, ref key, ref stackCtx, out internalStatus)) return needIO = false; } else - { - if (!FindTag(ref stackCtx.hei)) - { - internalStatus = OperationStatus.NOTFOUND; - return needIO = false; - } stackCtx.SetRecordSourceToHashEntry(hlogBase); - } try { - // minAddress is inclusive - if (!stackCtx.hei.IsReadCache) - { - if (stackCtx.hei.Address < minAddress) - return needIO = false; - if (stackCtx.hei.Address < hlogBase.HeadAddress) - { - needIO = stackCtx.hei.Address >= hlogBase.BeginAddress; - return false; - } - } - if (UseReadCache) SkipReadCache(ref stackCtx, out _); // Where this is called, we have no dependency on source addresses so we don't care if it Refreshed diff --git a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/InternalRMW.cs b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/InternalRMW.cs index ce614bd852..0652f03c90 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/InternalRMW.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/Implementation/InternalRMW.cs @@ -406,9 +406,7 @@ private OperationStatus CreateNewRecordRMW(sessionFunctions, ref stackCtx, ref srcRecordInfo) + ElideSourceRecord = stackCtx.recSrc.HasMainLogSrc && CanElide(sessionFunctions, ref stackCtx, ref srcRecordInfo) }; if (!TryAllocateRecord(sessionFunctions, ref pendingContext, ref stackCtx, actualSize, ref allocatedSize, keySize, allocOptions, @@ -416,7 +414,7 @@ private OperationStatus CreateNewRecordRMW= hlogBase.HeadAddress) + { + // Safe Read-Only Region: Create a record in the mutable region, but set srcRecordInfo in case we are eliding. + if (stackCtx.recSrc.HasMainLogSrc) + srcRecordInfo = ref stackCtx.recSrc.GetInfo(); + goto CreateNewRecord; + } // No record exists, or readonly or below. Drop through to create new record. 
Debug.Assert(!sessionFunctions.IsManualLocking || LockTable.IsLockedExclusive(ref stackCtx.hei), "A Lockable-session Upsert() of an on-disk or non-existent key requires a LockTable lock"); @@ -303,9 +310,7 @@ private OperationStatus CreateNewRecordUpsert(sessionFunctions, ref stackCtx, ref srcRecordInfo) + ElideSourceRecord = stackCtx.recSrc.HasMainLogSrc && CanElide(sessionFunctions, ref stackCtx, ref srcRecordInfo) }; if (!TryAllocateRecord(sessionFunctions, ref pendingContext, ref stackCtx, actualSize, ref allocatedSize, keySize, allocOptions, @@ -313,7 +318,7 @@ private OperationStatus CreateNewRecordUpsert public long MemorySizeBytes => ((long)(allocatorBase.AllocatedPageCount + allocator.OverflowPageCount)) << allocatorBase.LogPageSizeBits; + /// + /// Maximum memory size in bytes + /// + public long MaxMemorySizeBytes => allocatorBase.MaxMemorySizeBytes; + /// /// Number of pages allocated /// diff --git a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/TsavoriteIterator.cs b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/TsavoriteIterator.cs index 6d4b0b95b1..640575d095 100644 --- a/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/TsavoriteIterator.cs +++ b/libs/storage/Tsavorite/cs/src/core/Index/Tsavorite/TsavoriteIterator.cs @@ -51,26 +51,6 @@ public bool Iterate(TFunc scanFunctions.OnStop(!stop, numRecords); return !stop; } - - /// - /// Iterator for all (distinct) live key-values stored in Tsavorite - /// - /// Report records until this address (tail by default) - /// Tsavorite iterator - [Obsolete("Invoke Iterate() on a client session (ClientSession), or use store.Iterate overload with Functions provided as parameter")] - public ITsavoriteScanIterator Iterate(long untilAddress = -1) - => throw new TsavoriteException("Invoke Iterate() on a client session (ClientSession), or use store.Iterate overload with Functions provided as parameter"); - - /// - /// Iterator for all (distinct) live key-values stored in Tsavorite - /// - /// User provided 
compaction functions (see ). - /// Report records until this address (tail by default) - /// Tsavorite iterator - [Obsolete("Invoke Iterate() on a client session (ClientSession), or use store.Iterate overload with Functions provided as parameter")] - public ITsavoriteScanIterator Iterate(CompactionFunctions compactionFunctions, long untilAddress = -1) - where CompactionFunctions : ICompactionFunctions - => throw new TsavoriteException("Invoke Iterate() on a client session (ClientSession), or use store.Iterate overload with Functions provided as parameter"); } internal sealed class TsavoriteKVIterator : ITsavoriteScanIterator diff --git a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLog.cs b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLog.cs index 6b7c358251..50d4ced698 100644 --- a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLog.cs +++ b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLog.cs @@ -3,11 +3,9 @@ using System; using System.Buffers; -using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.IO; -using System.Linq; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -76,11 +74,6 @@ public sealed class TsavoriteLog : IDisposable /// public long SafeTailAddress; - /// - /// Dictionary of recovered iterators and their committed until addresses - /// - public Dictionary RecoveredIterators { get; private set; } - /// /// Log committed until address /// @@ -111,20 +104,13 @@ public sealed class TsavoriteLog : IDisposable /// internal CompletionEvent FlushEvent => allocator.FlushEvent; - /// - /// Table of persisted iterators - /// - internal readonly ConcurrentDictionary PersistedIterators = new(); - /// /// Committed view of commitMetadataVersion /// private long persistedCommitNum; - internal Dictionary LastPersistedIterators; - /// - /// Numer of references to log, including itself + /// Number of references to log, 
including itself /// Used to determine disposability of log /// internal int logRefCount = 1; @@ -132,9 +118,34 @@ public sealed class TsavoriteLog : IDisposable readonly ILogger logger; /// - /// Whether we refresh safe tail as records are inserted + /// SafeTailAddress refresh frequency in milliseconds. -1 => disabled; 0 => immediate refresh after every enqueue, >1 => refresh period in milliseconds. + /// + readonly int SafeTailRefreshFrequencyMs; + + /// + /// CTS to allow cancellation of the safe tail refresh background task, called during Dispose /// - readonly bool AutoRefreshSafeTailAddress; + readonly CancellationTokenSource safeTailRefreshTaskCts; + + /// + /// Last captured safe tail address before epoch bump + /// + long safeTailRefreshLastTailAddress = 0; + + /// + /// Events to control callback execution + /// + readonly SingleWaiterAutoResetEvent safeTailRefreshCallbackCompleted, safeTailRefreshEntryEnqueued; + + /// + /// Task corresponding to safe tail refresh + /// + readonly Task safeTailRefreshTask; + + /// + /// Action for bump epoch to refresh safe tail + /// + readonly Action periodicRefreshSafeTailAddressBumpCallbackAction; /// /// Callback when safe tail shifts @@ -147,9 +158,14 @@ public sealed class TsavoriteLog : IDisposable readonly bool AutoCommit; /// - /// Whether there is an ongoing auto refresh safe tail + /// Maximum memory size in bytes + /// + public long MaxMemorySizeBytes => allocator.MaxMemorySizeBytes; + + /// + /// Actual memory used by log /// - int _ongoingAutoRefreshSafeTailAddress = 0; + public long MemorySizeBytes => ((long)(allocator.AllocatedPageCount + allocator.OverflowPageCount)) << allocator.LogPageSizeBits; /// /// Create new log instance @@ -169,7 +185,6 @@ public TsavoriteLog(TsavoriteLogSettings logSettings, ILogger logger = null) private TsavoriteLog(TsavoriteLogSettings logSettings, bool syncRecover, ILogger logger = null) { this.logger = logger; - AutoRefreshSafeTailAddress = 
logSettings.AutoRefreshSafeTailAddress; AutoCommit = logSettings.AutoCommit; logCommitManager = logSettings.LogCommitManager ?? new DeviceLogCommitCheckpointManager @@ -221,6 +236,123 @@ private TsavoriteLog(TsavoriteLogSettings logSettings, bool syncRecover, ILogger } catch { } } + + // Set up safe tail refresh + SafeTailRefreshFrequencyMs = logSettings.SafeTailRefreshFrequencyMs; + if (SafeTailRefreshFrequencyMs >= 0) + { + safeTailRefreshCallbackCompleted = new() + { + RunContinuationsAsynchronously = true + }; + if (SafeTailRefreshFrequencyMs == 0) + { + safeTailRefreshEntryEnqueued = new() + { + RunContinuationsAsynchronously = true + }; + } + safeTailRefreshTaskCts = new(); + periodicRefreshSafeTailAddressBumpCallbackAction = PeriodicRefreshSafeTailAddressBumpCallback; + safeTailRefreshTask = Task.Run(SafeTailRefreshWorker); + } + } + + async Task SafeTailRefreshWorker() + { + try + { + var token = safeTailRefreshTaskCts.Token; + + // Outer loop makes the worker wake up every so often (either delay or enqueue-signal) + // and try to move SafeTailAddress towards TailAddress + while (!token.IsCancellationRequested) + { + // Inner loop keeps moving SafeTailAddress towards TailAddress until we have + // caught up and there is no more movement necessary. + while (!token.IsCancellationRequested) + { + try + { + // Resume epoch protection + epoch.Resume(); + + // Capture the tail address before epoch refresh, so that the bump action + // knows what the new SafeTailAddress should be set to. 
+ safeTailRefreshLastTailAddress = TailAddress; + + // Break out of inner loop if there is no more work to do + if (safeTailRefreshLastTailAddress <= SafeTailAddress) + break; + + // Bump epoch with an action to update SafeTailAddress to the captured safeTailRefreshLastTailAddress + epoch.BumpCurrentEpoch(periodicRefreshSafeTailAddressBumpCallbackAction); + } + finally + { + // Suspend epoch protection + epoch.Suspend(); + } + // Wait for the bump epoch action to finish executing, so we can re-check + await safeTailRefreshCallbackCompleted.WaitAsync().ConfigureAwait(false); + } + // Work is done, wait for the next iteration of the worker loop + if (SafeTailRefreshFrequencyMs > 0) + { + await Task.Delay(SafeTailRefreshFrequencyMs, token).ConfigureAwait(false); + } + else + { + await safeTailRefreshEntryEnqueued.WaitAsync().ConfigureAwait(false); + } + } + } + catch (Exception e) + { + logger?.LogError(e, "Exception encountered during PeriodicSafeTailRefreshRunner"); + } + } + + void PeriodicRefreshSafeTailAddressBumpCallback() + { + try + { + if (Utility.MonotonicUpdate(ref SafeTailAddress, safeTailRefreshLastTailAddress, out long oldSafeTailAddress)) + { + var tcs = refreshUncommittedTcs; + if (tcs != null && Interlocked.CompareExchange(ref refreshUncommittedTcs, null, tcs) == tcs) + tcs.SetResult(Empty.Default); + var _callback = SafeTailShiftCallback; + if (_callback != null || activeSingleIterators != null) + { + // We invoke callback outside epoch protection + bool isProtected = epoch.ThisInstanceProtected(); + if (isProtected) epoch.Suspend(); + try + { + // Notify waiting single iterators, if any + var _asi = activeSingleIterators; + if (_asi != null) + { + foreach (var iter in _asi) + { + iter.Signal(); + } + } + // Invoke callback, if any + _callback?.Invoke(oldSafeTailAddress, safeTailRefreshLastTailAddress); + } + finally + { + if (isProtected) epoch.Resume(); + } + } + } + } + finally + { + safeTailRefreshCallbackCompleted.Signal(); + } } /// @@ -284,12 
+416,10 @@ public void Recover(long requestedCommitNum = -1) if (CommittedUntilAddress > BeginAddress) throw new TsavoriteException($"Already recovered until address {CommittedUntilAddress}"); - Dictionary it; if (requestedCommitNum == -1) - RestoreLatest(out it, out RecoveredCookie); + RestoreLatest(out RecoveredCookie); else - RestoreSpecificCommit(requestedCommitNum, out it, out RecoveredCookie); - RecoveredIterators = it; + RestoreSpecificCommit(requestedCommitNum, out RecoveredCookie); } /// @@ -302,8 +432,7 @@ public static async ValueTask CreateAsync(TsavoriteLogSettings log var log = new TsavoriteLog(logSettings, false); if (logSettings.TryRecoverLatest) { - var (it, cookie) = await log.RestoreLatestAsync(cancellationToken).ConfigureAwait(false); - log.RecoveredIterators = it; + var cookie = await log.RestoreLatestAsync(cancellationToken).ConfigureAwait(false); log.RecoveredCookie = cookie; } return log; @@ -355,6 +484,9 @@ public void CompleteLog(bool spinWait = false) internal void TrueDispose() { + safeTailRefreshTaskCts?.Cancel(); + safeTailRefreshCallbackCompleted?.Signal(); + safeTailRefreshEntryEnqueued?.Signal(); commitQueue.Dispose(); commitTcs.TrySetException(new ObjectDisposedException("Log has been disposed")); allocator.Dispose(); @@ -437,8 +569,8 @@ public int UnsafeGetLogPageSizeBits() /// /// Get read only lag address /// - public long UnsafeGetReadOnlyLagAddress() - => allocator.GetReadOnlyLagAddress(); + public long UnsafeGetReadOnlyAddressLagOffset() + => allocator.GetReadOnlyAddressLagOffset(); /// /// Enqueue batch of entries to log (in memory) - no guarantee of flush/commit @@ -514,7 +646,7 @@ public unsafe bool TryEnqueue(T entry, out long logicalAddress) where T : ILo var physicalAddress = allocator.GetPhysicalAddress(logicalAddress); entry.SerializeTo(new Span((void*)(headerSize + physicalAddress), length)); SetHeader(length, (byte*)physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + 
safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -560,7 +692,7 @@ public unsafe bool TryEnqueue(IEnumerable entries, out long logicalAddress SetHeader(length, (byte*)physicalAddress); physicalAddress += Align(length) + headerSize; } - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -597,7 +729,7 @@ public unsafe bool TryEnqueue(byte[] entry, out long logicalAddress) fixed (byte* bp = entry) Buffer.MemoryCopy(bp, (void*)(headerSize + physicalAddress), length, length); SetHeader(length, (byte*)physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -636,7 +768,7 @@ public unsafe bool UnsafeTryEnqueueRaw(ReadOnlySpan entryBytes, bool noCom var physicalAddress = allocator.GetPhysicalAddress(logicalAddress); entryBytes.CopyTo(new Span((byte*)physicalAddress, length)); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit && !noCommit) Commit(); return true; @@ -672,7 +804,7 @@ public unsafe bool TryEnqueue(ReadOnlySpan entry, out long logicalAddress) fixed (byte* bp = &entry.GetPinnableReference()) Buffer.MemoryCopy(bp, (void*)(headerSize + physicalAddress), length, length); SetHeader(length, (byte*)physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -698,7 +830,7 @@ public unsafe void Enqueue(THeader userHeader, out long logicalAddress) var physicalAddress = (byte*)allocator.GetPhysicalAddress(logicalAddress); *(THeader*)(physicalAddress + headerSize) = userHeader; SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) 
DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); } @@ -725,7 +857,7 @@ public unsafe void Enqueue(THeader userHeader, ref SpanByte item, out l *(THeader*)(physicalAddress + headerSize) = userHeader; item.CopyTo(physicalAddress + headerSize + sizeof(THeader)); SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); } @@ -754,7 +886,7 @@ public unsafe void Enqueue(THeader userHeader, ref SpanByte item1, ref item1.CopyTo(physicalAddress + headerSize + sizeof(THeader)); item2.CopyTo(physicalAddress + headerSize + sizeof(THeader) + item1.TotalSize); SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); } @@ -785,7 +917,7 @@ public unsafe void Enqueue(THeader userHeader, ref SpanByte item1, ref item2.CopyTo(physicalAddress + headerSize + sizeof(THeader) + item1.TotalSize); item3.CopyTo(physicalAddress + headerSize + sizeof(THeader) + item1.TotalSize + item2.TotalSize); SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); } @@ -825,7 +957,7 @@ public unsafe void Enqueue(THeader userHeader, ref SpanByte[] items, ou } SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); } @@ -851,7 +983,7 @@ public unsafe void Enqueue(byte userHeader, ref SpanByte item, out long logicalA *physicalAddress = userHeader; item.CopyTo(physicalAddress + sizeof(byte)); SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + 
safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); } @@ -862,28 +994,23 @@ private long AllocateBlock(int recordSize) while (true) { var flushEvent = allocator.FlushEvent; - var logicalAddress = allocator.TryAllocate(recordSize); + var logicalAddress = allocator.TryAllocateRetryNow(recordSize); if (logicalAddress > 0) return logicalAddress; - if (logicalAddress == 0) + // logicalAddress less than 0 (RETRY_NOW) should already have been handled + Debug.Assert(logicalAddress == 0); + + epoch.Suspend(); + if (cannedException != null) throw cannedException; + try { - epoch.Suspend(); - if (cannedException != null) throw cannedException; - try - { - flushEvent.Wait(); - } - finally - { - epoch.Resume(); - } + flushEvent.Wait(); + } + finally + { + epoch.Resume(); } - - // logicalAddress is < 0 so we do not expect flushEvent to be signaled; refresh the epoch and retry now - allocator.TryComplete(); - epoch.ProtectAndDrain(); - Thread.Yield(); } } @@ -919,7 +1046,7 @@ public unsafe bool TryEnqueue(THeader userHeader, ref SpanByte item1, r item1.CopyTo(physicalAddress + headerSize + sizeof(THeader)); item2.CopyTo(physicalAddress + headerSize + sizeof(THeader) + item1.TotalSize); SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -959,7 +1086,7 @@ public unsafe bool TryEnqueue(THeader userHeader, ref SpanByte item1, r item2.CopyTo(physicalAddress + headerSize + sizeof(THeader) + item1.TotalSize); item3.CopyTo(physicalAddress + headerSize + sizeof(THeader) + item1.TotalSize + item2.TotalSize); SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -994,7 +1121,7 @@ public unsafe bool TryEnqueue(byte userHeader, ref SpanByte item, out long logic 
*physicalAddress = userHeader; item.CopyTo(physicalAddress + sizeof(byte)); SetHeader(length, physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -1246,7 +1373,7 @@ public async ValueTask WaitForCommitAsync(long untilAddress = 0, long commitNum /// true if there's more data available to be read; false if there will never be more data (log has been shutdown) public async ValueTask WaitUncommittedAsync(long nextAddress, CancellationToken token = default) { - Debug.Assert(AutoRefreshSafeTailAddress); + Debug.Assert(SafeTailRefreshFrequencyMs >= 0); if (nextAddress < SafeTailAddress) return true; @@ -1286,13 +1413,13 @@ public async ValueTask WaitUncommittedAsync(long nextAddress, Cancellation /// If true, spin-wait until commit completes. Otherwise, issue commit and return immediately. /// whether there is anything to commit. - public void Commit(bool spinWait = false) + public void Commit(bool spinWait = false, byte[] cookie = null) { // Take a lower-bound of the content of this commit in case our request is filtered but we need to spin var tail = TailAddress; var lastCommit = commitNum; - var success = CommitInternal(out var actualTail, out var actualCommitNum, true, null, -1, null); + var success = CommitInternal(out var actualTail, out var actualCommitNum, cookie == null, cookie, -1, null); if (!spinWait) return; if (success) WaitForCommit(actualTail, actualCommitNum); @@ -1336,7 +1463,7 @@ public bool CommitStrongly(out long commitTail, out long actualCommitNum, bool s /// ongoing commit fails. 
/// /// - public async ValueTask CommitAsync(CancellationToken token = default) + public async ValueTask CommitAsync(byte[] cookie = null, CancellationToken token = default) { token.ThrowIfCancellationRequested(); @@ -1345,7 +1472,7 @@ public async ValueTask CommitAsync(CancellationToken token = default) var lastCommit = commitNum; var task = CommitTask; - var success = CommitInternal(out var actualTail, out var actualCommitNum, true, null, -1, null); + var success = CommitInternal(out var actualTail, out var actualCommitNum, cookie == null, cookie, -1, null); if (success) { @@ -1371,7 +1498,7 @@ public async ValueTask CommitAsync(CancellationToken token = default) /// from prevCommitTask to current fails. /// /// - public async ValueTask> CommitAsync(Task prevCommitTask, CancellationToken token = default) + public async ValueTask> CommitAsync(Task prevCommitTask, byte[] cookie = null, CancellationToken token = default) { token.ThrowIfCancellationRequested(); @@ -1381,7 +1508,7 @@ public async ValueTask> CommitAsync(Task /// /// - /// - public void UnsafeShiftBeginAddress(long untilAddress, bool snapToPageStart = false, bool truncateLog = false, bool noFlush = false) + public void UnsafeShiftBeginAddress(long untilAddress, bool snapToPageStart = false, bool truncateLog = false) { if (Utility.MonotonicUpdate(ref beginAddress, untilAddress, out _)) { @@ -1819,7 +1945,7 @@ public void UnsafeShiftBeginAddress(long untilAddress, bool snapToPageStart = fa { if (!epochProtected) epoch.Resume(); - allocator.ShiftBeginAddress(untilAddress, truncateLog, noFlush); + allocator.ShiftBeginAddress(untilAddress, truncateLog, noFlush: true); } finally { @@ -1845,40 +1971,72 @@ public void TruncateUntilPageStart(long untilAddress) /// /// Begin address for scan. /// End address for scan (or long.MaxValue for tailing). - /// Name of iterator, if we need to persist/recover it (default null - do not persist). /// Whether to recover named iterator from latest commit (if exists). 
If false, iterator starts from beginAddress. /// Use single or double buffering /// Whether we scan uncommitted data /// /// - public TsavoriteLogScanIterator Scan(long beginAddress, long endAddress, string name = null, bool recover = true, ScanBufferingMode scanBufferingMode = ScanBufferingMode.DoublePageBuffering, bool scanUncommitted = false, ILogger logger = null) + public TsavoriteLogScanIterator Scan(long beginAddress, long endAddress, bool recover = true, ScanBufferingMode scanBufferingMode = ScanBufferingMode.DoublePageBuffering, bool scanUncommitted = false, ILogger logger = null) { if (readOnlyMode) { scanBufferingMode = ScanBufferingMode.SinglePageBuffering; - if (name != null) - throw new TsavoriteException("Cannot use named iterators with read-only TsavoriteLog"); if (scanUncommitted) throw new TsavoriteException("Cannot use scanUncommitted with read-only TsavoriteLog"); } - if (scanUncommitted && !AutoRefreshSafeTailAddress) - throw new TsavoriteException("Cannot use scanUncommitted without setting AutoRefreshSafeTailAddress to true in TsavoriteLog settings"); + if (scanUncommitted && SafeTailRefreshFrequencyMs < 0) + throw new TsavoriteException("Cannot use scanUncommitted without setting SafeTailRefreshFrequencyMs to a non-negative value in TsavoriteLog settings"); - TsavoriteLogScanIterator iter; - if (recover && name != null && RecoveredIterators != null && RecoveredIterators.ContainsKey(name)) - iter = new TsavoriteLogScanIterator(this, allocator, RecoveredIterators[name], endAddress, getMemory, scanBufferingMode, epoch, headerSize, name, scanUncommitted, logger: logger); - else - iter = new TsavoriteLogScanIterator(this, allocator, beginAddress, endAddress, getMemory, scanBufferingMode, epoch, headerSize, name, scanUncommitted, logger: logger); + var iter = new TsavoriteLogScanIterator(this, allocator, beginAddress, endAddress, getMemory, scanBufferingMode, epoch, headerSize, scanUncommitted, logger: logger); + + if (Interlocked.Increment(ref 
logRefCount) == 1) + throw new TsavoriteException("Cannot scan disposed log instance"); + return iter; + } + + List activeSingleIterators; + + public void RemoveIterator(TsavoriteLogScanSingleIterator iterator) + { + lock (this) + { + if (activeSingleIterators != null) + { + List newList = null; + foreach (var it in activeSingleIterators) + { + if (it != iterator) + { + newList ??= []; + newList.Add(it); + } + } + activeSingleIterators = newList; + } + } + } - if (name != null) + public TsavoriteLogScanSingleIterator ScanSingle(long beginAddress, long endAddress, bool recover = true, ScanBufferingMode scanBufferingMode = ScanBufferingMode.DoublePageBuffering, bool scanUncommitted = false, ILogger logger = null) + { + if (readOnlyMode) { - if (name.Length > 20) - throw new TsavoriteException("Max length of iterator name is 20 characters"); - if (PersistedIterators.ContainsKey(name)) - logger?.LogDebug("Iterator name exists, overwriting"); - PersistedIterators[name] = iter; + scanBufferingMode = ScanBufferingMode.SinglePageBuffering; + + if (scanUncommitted) + throw new TsavoriteException("Cannot use scanUncommitted with read-only TsavoriteLog"); + } + + if (scanUncommitted && SafeTailRefreshFrequencyMs < 0) + throw new TsavoriteException("Cannot use scanUncommitted without setting SafeTailRefreshFrequencyMs to a non-negative value in TsavoriteLog settings"); + + var iter = new TsavoriteLogScanSingleIterator(this, allocator, beginAddress, endAddress, getMemory, scanBufferingMode, epoch, headerSize, scanUncommitted, logger: logger); + + lock (this) + { + List newList = activeSingleIterators == null ? 
new() { iter } : new(activeSingleIterators) { iter }; + activeSingleIterators = newList; } if (Interlocked.Increment(ref logRefCount) == 1) @@ -1977,72 +2135,13 @@ public async ValueTask ReadRecordLengthAsync(long address, CancellationToke } /// - /// Initiate auto refresh safe tail address, called with epoch protection + /// Trigger refresh of safe tail address /// private void DoAutoRefreshSafeTailAddress() { - if (_ongoingAutoRefreshSafeTailAddress == 0 && Interlocked.CompareExchange(ref _ongoingAutoRefreshSafeTailAddress, 1, 0) == 0) - AutoRefreshSafeTailAddressRunner(false); - } - - private void EpochProtectAutoRefreshSafeTailAddressRunner() - { - try - { - epoch.Resume(); - AutoRefreshSafeTailAddressRunner(false); - } - finally - { - epoch.Suspend(); - } - } - - private void AutoRefreshSafeTailAddressRunner(bool recurse) - { - long tail = 0; - do - { - tail = TailAddress; - if (tail > SafeTailAddress) - { - if (recurse) - Task.Run(EpochProtectAutoRefreshSafeTailAddressRunner); - else - epoch.BumpCurrentEpoch(() => AutoRefreshSafeTailAddressBumpCallback(tail)); - return; - } - _ongoingAutoRefreshSafeTailAddress = 0; - } while (tail > SafeTailAddress && _ongoingAutoRefreshSafeTailAddress == 0 && Interlocked.CompareExchange(ref _ongoingAutoRefreshSafeTailAddress, 1, 0) == 0); - } - - private void AutoRefreshSafeTailAddressBumpCallback(long tailAddress) - { - if (Utility.MonotonicUpdate(ref SafeTailAddress, tailAddress, out long oldSafeTailAddress)) - { - var tcs = refreshUncommittedTcs; - if (tcs != null && Interlocked.CompareExchange(ref refreshUncommittedTcs, null, tcs) == tcs) - tcs.SetResult(Empty.Default); - var _callback = SafeTailShiftCallback; - if (_callback != null) - { - // We invoke callback outside epoch protection - bool isProtected = epoch.ThisInstanceProtected(); - if (isProtected) epoch.Suspend(); - try - { - _callback.Invoke(oldSafeTailAddress, tailAddress); - } - finally - { - if (isProtected) epoch.Resume(); - } - } - } - 
AutoRefreshSafeTailAddressRunner(true); + safeTailRefreshEntryEnqueued?.Signal(); } - [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int Align(int length) { @@ -2056,7 +2155,7 @@ private void CommitCallback(CommitInfo commitInfo) { // Using count is safe as a fast filtering mechanism to reduce number of invocations despite concurrency if (ongoingCommitRequests.Count == 0 && commitInfo.ErrorCode == 0) return; - commitQueue.EnqueueAndTryWork(commitInfo, asTask: true); + commitQueue.AddWorkItem(commitInfo); } private unsafe bool TryEnqueueCommitRecord(ref TsavoriteLogRecoveryInfo info) @@ -2084,7 +2183,7 @@ private unsafe bool TryEnqueueCommitRecord(ref TsavoriteLogRecoveryInfo info) fixed (byte* bp = entryBody) Buffer.MemoryCopy(bp, (void*)(headerSize + physicalAddress), entryBody.Length, entryBody.Length); SetCommitRecordHeader(entryBody.Length, (byte*)physicalAddress); - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); // Return the commit tail return true; @@ -2092,7 +2191,7 @@ private unsafe bool TryEnqueueCommitRecord(ref TsavoriteLogRecoveryInfo info) private bool ShouldCommmitMetadata(ref TsavoriteLogRecoveryInfo info) { - return beginAddress > CommittedBeginAddress || IteratorsChanged(ref info) || info.Cookie != null; + return beginAddress > CommittedBeginAddress || info.Cookie != null; } private void CommitMetadataOnly(ref TsavoriteLogRecoveryInfo info) @@ -2110,10 +2209,8 @@ private void CommitMetadataOnly(ref TsavoriteLogRecoveryInfo info) private void UpdateCommittedState(TsavoriteLogRecoveryInfo recoveryInfo) { - LastPersistedIterators = recoveryInfo.Iterators; CommittedBeginAddress = recoveryInfo.BeginAddress; CommittedUntilAddress = recoveryInfo.UntilAddress; - recoveryInfo.CommitIterators(PersistedIterators); Utility.MonotonicUpdate(ref persistedCommitNum, recoveryInfo.CommitNum, out _); } @@ -2222,27 +2319,6 @@ private void 
SerialCommitCallbackWorker(CommitInfo commitInfo) _commitTcs?.TrySetResult(lci); } - private bool IteratorsChanged(ref TsavoriteLogRecoveryInfo info) - { - var _lastPersistedIterators = LastPersistedIterators; - if (_lastPersistedIterators == null) - { - return info.Iterators != null && info.Iterators.Count != 0; - } - if (info.Iterators == null || _lastPersistedIterators.Count != info.Iterators.Count) - return true; - foreach (var item in _lastPersistedIterators) - { - if (info.Iterators.TryGetValue(item.Key, out var other)) - { - if (item.Value != other) return true; - } - else - return true; - } - return false; - } - /// /// Synchronously recover instance to TsavoriteLog's latest valid commit, when being used as a readonly log iterator /// @@ -2251,7 +2327,7 @@ public void RecoverReadOnly() if (!readOnlyMode) throw new TsavoriteException("This method can only be used with a read-only TsavoriteLog instance used for iteration. Set TsavoriteLogSettings.ReadOnlyMode to true during creation to indicate this."); - RestoreLatest(out _, out _); + RestoreLatest(out _); SignalWaitingROIterators(); } @@ -2305,9 +2381,8 @@ private bool LoadCommitMetadata(long commitNum, out TsavoriteLogRecoveryInfo inf return true; } - private void RestoreLatest(out Dictionary iterators, out byte[] cookie) + private void RestoreLatest(out byte[] cookie) { - iterators = null; cookie = null; TsavoriteLogRecoveryInfo info = new(); @@ -2374,10 +2449,10 @@ private void RestoreLatest(out Dictionary iterators, out byte[] co } } - iterators = CompleteRestoreFromCommit(info); + CompleteRestoreFromCommit(info); cookie = info.Cookie; commitNum = info.CommitNum; - // After recovery persisted commitnum remians 0 so we need to set it to latest commit number + // After recovery, persisted commitnum remains 0 so we need to set it to latest commit number persistedCommitNum = info.CommitNum; beginAddress = allocator.BeginAddress; if (readOnlyMode) @@ -2386,9 +2461,8 @@ private void RestoreLatest(out 
Dictionary iterators, out byte[] co if (scanStart > 0) logCommitManager.OnRecovery(scanStart); } - private void RestoreSpecificCommit(long requestedCommitNum, out Dictionary iterators, out byte[] cookie) + private void RestoreSpecificCommit(long requestedCommitNum, out byte[] cookie) { - iterators = null; cookie = null; TsavoriteLogRecoveryInfo info = new(); @@ -2450,7 +2524,7 @@ private void RestoreSpecificCommit(long requestedCommitNum, out Dictionary /// Restore log asynchronously /// - private async ValueTask<(Dictionary, byte[])> RestoreLatestAsync(CancellationToken cancellationToken) + private async ValueTask RestoreLatestAsync(CancellationToken cancellationToken) { TsavoriteLogRecoveryInfo info = new(); @@ -2507,7 +2581,7 @@ private void RestoreSpecificCommit(long requestedCommitNum, out Dictionary(), null); + return null; } if (!readOnlyMode) @@ -2521,7 +2595,7 @@ private void RestoreSpecificCommit(long requestedCommitNum, out Dictionary 0) logCommitManager.OnRecovery(scanStart); - return (iterators, cookie); + return cookie; } - private Dictionary CompleteRestoreFromCommit(TsavoriteLogRecoveryInfo info) + private void CompleteRestoreFromCommit(TsavoriteLogRecoveryInfo info) { CommittedUntilAddress = info.UntilAddress; CommittedBeginAddress = info.BeginAddress; SafeTailAddress = info.UntilAddress; - - // Fix uncommitted addresses in iterators - var recoveredIterators = info.Iterators; - if (recoveredIterators != null) - { - List keys = [.. 
recoveredIterators.Keys]; - foreach (var key in keys) - if (recoveredIterators[key] > SafeTailAddress) - recoveredIterators[key] = SafeTailAddress; - } - return recoveredIterators; } /// @@ -2594,7 +2657,7 @@ private unsafe bool TryAppend(IReadOnlySpanBatch readOnlySpanBatch, out long log SetHeader(entryLength, (byte*)physicalAddress); physicalAddress += Align(entryLength) + headerSize; } - if (AutoRefreshSafeTailAddress) DoAutoRefreshSafeTailAddress(); + safeTailRefreshEntryEnqueued?.Signal(); epoch.Suspend(); if (AutoCommit) Commit(); return true; @@ -2753,7 +2816,6 @@ private bool CommitInternal(out long commitTail, out long actualCommitNum, bool Cookie = cookie, Callback = callback, }; - info.SnapshotIterators(PersistedIterators); var commitRequired = ShouldCommmitMetadata(ref info) || (commitCoveredAddress < TailAddress); // Only apply commit policy if not a strong commit if (fastForwardAllowed && !commitPolicy.AdmitCommit(TailAddress, commitRequired)) diff --git a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogIterator.cs b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogIterator.cs index f7f7159eb0..b23eedefa0 100644 --- a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogIterator.cs +++ b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogIterator.cs @@ -17,22 +17,15 @@ namespace Tsavorite.core /// /// Scan iterator for hybrid log /// - public sealed class TsavoriteLogScanIterator : ScanIteratorBase, IDisposable + public class TsavoriteLogScanIterator : ScanIteratorBase, IDisposable { - private readonly string name; - private readonly TsavoriteLog tsavoriteLog; + protected readonly TsavoriteLog tsavoriteLog; private readonly BlittableAllocatorImpl allocator; private readonly BlittableFrame frame; private readonly GetMemory getMemory; private readonly int headerSize; - private readonly bool scanUncommitted; - private bool disposed = false; - internal long requestedCompletedUntilAddress; - - /// - /// Iteration 
completed until (as part of commit) - /// - public long CompletedUntilAddress; + protected readonly bool scanUncommitted; + protected bool disposed = false; /// /// Whether iteration has ended, either because we reached the end address of iteration, or because @@ -50,12 +43,11 @@ public sealed class TsavoriteLogScanIterator : ScanIteratorBase, IDisposable /// /// /// - /// /// /// /// internal unsafe TsavoriteLogScanIterator(TsavoriteLog tsavoriteLog, BlittableAllocatorImpl hlog, long beginAddress, long endAddress, - GetMemory getMemory, ScanBufferingMode scanBufferingMode, LightEpoch epoch, int headerSize, string name, bool scanUncommitted = false, ILogger logger = null) + GetMemory getMemory, ScanBufferingMode scanBufferingMode, LightEpoch epoch, int headerSize, bool scanUncommitted = false, ILogger logger = null) : base(beginAddress == 0 ? hlog.GetFirstValidLogicalAddress(0) : beginAddress, endAddress, scanBufferingMode, false, epoch, hlog.LogPageSizeBits, logger: logger) { this.tsavoriteLog = tsavoriteLog; @@ -63,10 +55,6 @@ internal unsafe TsavoriteLogScanIterator(TsavoriteLog tsavoriteLog, BlittableAll this.getMemory = getMemory; this.headerSize = headerSize; this.scanUncommitted = scanUncommitted; - - this.name = name; - CompletedUntilAddress = beginAddress; - if (frameSize > 0) frame = new BlittableFrame(frameSize, hlog.PageSize, hlog.GetDeviceSectorSize()); } @@ -174,7 +162,7 @@ public ValueTask WaitAsync(CancellationToken token = default) if (NextAddress < tsavoriteLog.SafeTailAddress) return new ValueTask(true); - return SlowWaitUncommittedAsync(this, token); + return SlowWaitUncommittedAsync(token); } private static async ValueTask SlowWaitAsync(TsavoriteLogScanIterator @this, CancellationToken token) @@ -197,23 +185,23 @@ private static async ValueTask SlowWaitAsync(TsavoriteLogScanIterator @thi } } - private static async ValueTask SlowWaitUncommittedAsync(TsavoriteLogScanIterator @this, CancellationToken token) + protected virtual async ValueTask 
SlowWaitUncommittedAsync(CancellationToken token) { while (true) { - if (@this.disposed) + if (this.disposed) return false; - if (@this.Ended) return false; + if (this.Ended) return false; - var tcs = @this.tsavoriteLog.refreshUncommittedTcs; + var tcs = this.tsavoriteLog.refreshUncommittedTcs; if (tcs == null) { var newTcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); - tcs = Interlocked.CompareExchange(ref @this.tsavoriteLog.refreshUncommittedTcs, newTcs, null); + tcs = Interlocked.CompareExchange(ref this.tsavoriteLog.refreshUncommittedTcs, newTcs, null); tcs ??= newTcs; // successful CAS so update the local var } - if (@this.NextAddress < @this.tsavoriteLog.SafeTailAddress) + if (this.NextAddress < this.tsavoriteLog.SafeTailAddress) return true; // Ignore refresh-uncommitted exceptions, except when the token is signaled @@ -612,38 +600,6 @@ public unsafe bool UnsafeGetNext(out byte* entry, out int entryLength, out long /// public void UnsafeRelease() => epoch.Suspend(); - /// - /// Mark iterator complete until specified address. Info is not - /// persisted until a subsequent commit operation on the log. - /// - /// - public void CompleteUntil(long address) - { - Utility.MonotonicUpdate(ref requestedCompletedUntilAddress, address, out _); - } - - /// - /// Mark iterator complete until the end of the record at specified - /// address. Info is not persisted until a subsequent commit operation - /// on the log. Note: this is slower than CompleteUntil() because the - /// record's length needs to be looked up first. 
- /// - /// - /// - /// The actual completion address (end address of the record) - public async ValueTask CompleteUntilRecordAtAsync(long recordStartAddress, CancellationToken token = default) - { - int len = await tsavoriteLog.ReadRecordLengthAsync(recordStartAddress, token: token); - long endAddress = recordStartAddress + headerSize + Align(len); - CompleteUntil(endAddress); - return endAddress; - } - - internal void UpdateCompletedUntilAddress(long address) - { - Utility.MonotonicUpdate(ref CompletedUntilAddress, address, out _); - } - /// /// Dispose the iterator /// @@ -656,9 +612,6 @@ public override void Dispose() // Dispose/unpin the frame from memory frame?.Dispose(); - if (name != null) - tsavoriteLog.PersistedIterators.TryRemove(name, out _); - if (Interlocked.Decrement(ref tsavoriteLog.logRefCount) == 0) tsavoriteLog.TrueDispose(); @@ -773,10 +726,19 @@ private unsafe bool GetNextInternal(out long physicalAddress, out int entryLengt outNextAddress = currentAddress; } + var _headAddress = allocator.HeadAddress; + + // Fast forward to memory in case we are flushing to a null device + if (allocator.IsNullDevice && currentAddress < _headAddress) + { + Utility.MonotonicUpdate(ref nextAddress, _headAddress, out _); + currentAddress = nextAddress; + outNextAddress = currentAddress; + } + var _currentPage = currentAddress >> allocator.LogPageSizeBits; var _currentFrame = _currentPage % frameSize; var _currentOffset = currentAddress & allocator.PageSizeMask; - var _headAddress = allocator.HeadAddress; if (disposed) return false; @@ -879,15 +841,22 @@ private unsafe bool ExpandGetNextInternal(long startPhysicalAddress, ref int tot // Check for boundary conditions if (currentAddress < allocator.BeginAddress) { - Utility.MonotonicUpdate(ref nextAddress, allocator.BeginAddress, out _); - currentAddress = nextAddress; - outNextAddress = currentAddress; + // Cannot expand, return false + return false; + } + + var _headAddress = allocator.HeadAddress; + + // Fast 
forward to memory in case we are flushing to a null device + if (allocator.IsNullDevice && currentAddress < _headAddress) + { + // Cannot expand, return false + return false; } var _currentPage = currentAddress >> allocator.LogPageSizeBits; var _currentFrame = _currentPage % frameSize; var _currentOffset = currentAddress & allocator.PageSizeMask; - var _headAddress = allocator.HeadAddress; if (disposed) return false; diff --git a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogRecoveryInfo.cs b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogRecoveryInfo.cs index fbf0826ba5..244d36f085 100644 --- a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogRecoveryInfo.cs +++ b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogRecoveryInfo.cs @@ -3,8 +3,6 @@ using System; using System.Buffers.Binary; -using System.Collections.Concurrent; -using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Text; @@ -31,11 +29,6 @@ public struct TsavoriteLogRecoveryInfo /// public long UntilAddress; - /// - /// Persisted iterators - /// - public Dictionary Iterators; - /// /// User-specified commit cookie /// @@ -63,7 +56,6 @@ public void Initialize() { BeginAddress = 0; UntilAddress = 0; - Iterators = null; Cookie = null; } @@ -104,7 +96,6 @@ public void Initialize(ReadOnlySpan input) if (iteratorCount > 0) { - Iterators = new Dictionary(iteratorCount); for (var i = 0; i < iteratorCount; i++) { var keyLength = BinaryPrimitives.ReadInt32LittleEndian(input); @@ -115,8 +106,6 @@ public void Initialize(ReadOnlySpan input) var iteratorValue = BinaryPrimitives.ReadInt64LittleEndian(input); input = input.Slice(sizeof(long)); - - Iterators.Add(iteratorKey, iteratorValue); } } @@ -169,8 +158,6 @@ public readonly byte[] ToByteArray() writer.Write(TsavoriteLogRecoveryVersion); // version int iteratorCount = 0; - if (Iterators != null) iteratorCount = Iterators.Count; - int cookieLength = -1; long cookieChecksum = 0; if 
(Cookie != null) @@ -188,19 +175,7 @@ public readonly byte[] ToByteArray() writer.Write(BeginAddress); writer.Write(UntilAddress); writer.Write(CommitNum); - - writer.Write(iteratorCount); - if (iteratorCount > 0) - { - foreach (var kvp in Iterators) - { - var bytes = Encoding.UTF8.GetBytes(kvp.Key); - writer.Write(bytes.Length); - writer.Write(bytes); - writer.Write(kvp.Value); - } - } - + writer.Write(iteratorCount); // leaving this field for backwards compatibility writer.Write(cookieLength); if (cookieLength > 0) writer.Write(Cookie); @@ -213,47 +188,7 @@ public readonly byte[] ToByteArray() /// size of this recovery info serialized public int SerializedSize() { - var iteratorSize = sizeof(int); - if (Iterators != null) - { - foreach (var kvp in Iterators) - iteratorSize += sizeof(int) + Encoding.UTF8.GetByteCount(kvp.Key) + sizeof(long); - } - - return sizeof(int) + 4 * sizeof(long) + iteratorSize + sizeof(int) + (Cookie?.Length ?? 0); - } - - /// - /// Take snapshot of persisted iterators - /// - /// Persisted iterators - public void SnapshotIterators(ConcurrentDictionary persistedIterators) - { - Iterators = new Dictionary(); - - if (!persistedIterators.IsEmpty) - { - foreach (var kvp in persistedIterators) - { - Iterators.Add(kvp.Key, kvp.Value.requestedCompletedUntilAddress); - } - } - } - - /// - /// Update iterators after persistence - /// - /// Persisted iterators - public void CommitIterators(ConcurrentDictionary persistedIterators) - { - if (Iterators?.Count > 0) - { - foreach (var kvp in Iterators) - { - if (persistedIterators.TryGetValue(kvp.Key, out TsavoriteLogScanIterator iterator)) - iterator.UpdateCompletedUntilAddress(kvp.Value); - } - } + return sizeof(int) + 4 * sizeof(long) + sizeof(int) + sizeof(int) + (Cookie?.Length ?? 
0); } /// diff --git a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogScanSingleIterator.cs b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogScanSingleIterator.cs new file mode 100644 index 0000000000..7ffb879bff --- /dev/null +++ b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogScanSingleIterator.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace Tsavorite.core +{ + using EmptyStoreFunctions = StoreFunctions>; + + /// + /// Scan iterator for hybrid log - only a single scan is supported per instance + /// This modification allows us to use a SingleWaiterAutoResetEvent per iterator + /// so we can avoid TCS allocations per tail bump. + /// + public sealed class TsavoriteLogScanSingleIterator : TsavoriteLogScanIterator + { + readonly SingleWaiterAutoResetEvent onEnqueue; + + internal TsavoriteLogScanSingleIterator(TsavoriteLog tsavoriteLog, BlittableAllocatorImpl hlog, long beginAddress, long endAddress, + GetMemory getMemory, ScanBufferingMode scanBufferingMode, LightEpoch epoch, int headerSize, bool scanUncommitted = false, ILogger logger = null) + : base(tsavoriteLog, hlog, beginAddress, endAddress, getMemory, scanBufferingMode, epoch, headerSize, scanUncommitted, logger) + { + onEnqueue = new() + { + RunContinuationsAsynchronously = true + }; + } + + public override void Dispose() + { + tsavoriteLog.RemoveIterator(this); + base.Dispose(); + // Any awaiting iterator should be woken up during dispose + onEnqueue.Signal(); + } + + public void Signal() + => onEnqueue.Signal(); + + protected override async ValueTask SlowWaitUncommittedAsync(CancellationToken token) + { + while (!token.IsCancellationRequested) + { + if (this.disposed) + return false; + if (this.Ended) return false; + + if (this.NextAddress < this.tsavoriteLog.SafeTailAddress) + return true; + + // Ignore 
refresh-uncommitted exceptions, except when the token is signaled + await onEnqueue.WaitAsync().ConfigureAwait(false); + } + return false; + } + } +} \ No newline at end of file diff --git a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogSettings.cs b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogSettings.cs index 194f48ddf6..63e7fa2140 100644 --- a/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogSettings.cs +++ b/libs/storage/Tsavorite/cs/src/core/TsavoriteLog/TsavoriteLogSettings.cs @@ -131,9 +131,9 @@ public class TsavoriteLogSettings : IDisposable public bool TryRecoverLatest = true; /// - /// Whether we refresh safe tail address as records are inserted + /// SafeTailAddress refresh frequency in milliseconds. -1 => disabled; 0 => immediate refresh after every enqueue, >1 => refresh period in milliseconds. /// - public bool AutoRefreshSafeTailAddress = false; + public int SafeTailRefreshFrequencyMs = -1; /// /// Whether we automatically commit the log as records are inserted diff --git a/libs/storage/Tsavorite/cs/test/EnqueueTests.cs b/libs/storage/Tsavorite/cs/test/EnqueueTests.cs index 49df3cbadd..ab40092498 100644 --- a/libs/storage/Tsavorite/cs/test/EnqueueTests.cs +++ b/libs/storage/Tsavorite/cs/test/EnqueueTests.cs @@ -185,7 +185,6 @@ public async Task EnqueueAsyncBasicTest([Values] TestUtils.DeviceType deviceType var input1 = new byte[] { 0, 1, 2, 3 }; var input2 = new byte[] { 4, 5, 6, 7, 8, 9, 10 }; var input3 = new byte[] { 11, 12 }; - string readerName = "abc"; await log.EnqueueAsync(input1, cancellationToken); await log.EnqueueAsync(input2); @@ -197,7 +196,7 @@ public async Task EnqueueAsyncBasicTest([Values] TestUtils.DeviceType deviceType // Read the log to make sure all entries are put in int currentEntry = 1; - using (var iter = log.Scan(0, long.MaxValue, readerName)) + using (var iter = log.Scan(0, long.MaxValue)) { while (iter.GetNext(out byte[] result, out _, out _)) { @@ -232,9 +231,7 @@ public async 
Task EnqueueAsyncBasicTest([Values] TestUtils.DeviceType deviceType // Make sure expected length is same as current - also makes sure that data verification was not skipped ClassicAssert.AreEqual(expectedEntryCount, currentEntry); - } - } } } \ No newline at end of file diff --git a/libs/storage/Tsavorite/cs/test/InsertAtTailSpanByteStressTests.cs b/libs/storage/Tsavorite/cs/test/InsertAtTailSpanByteStressTests.cs new file mode 100644 index 0000000000..747b9a2353 --- /dev/null +++ b/libs/storage/Tsavorite/cs/test/InsertAtTailSpanByteStressTests.cs @@ -0,0 +1,318 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Threading.Tasks; +using NUnit.Framework; +using NUnit.Framework.Legacy; +using Tsavorite.core; +using static Tsavorite.test.TestUtils; + +#pragma warning disable // Add parentheses for clarity + +namespace Tsavorite.test.InsertAtTailStressTests +{ + using SpanByteStoreFunctions = StoreFunctions; + + // Number of mutable pages for this test + public enum MutablePages + { + Zero, + One, + Two + } + + class SpanByteInsertAtTailChainTests + { + private TsavoriteKV> store; + private IDevice log; + SpanByteComparerModulo comparer; + + const long ValueAdd = 1_000_000_000; + const long NumKeys = 2_000; + + long GetMutablePageCount(MutablePages mp) => mp switch + { + MutablePages.Zero => 0, + MutablePages.One => 1, + MutablePages.Two => 2, + _ => 8 + }; + + [SetUp] + public void Setup() + { + DeleteDirectory(MethodTestDir, wait: true); + + string filename = Path.Join(MethodTestDir, $"{GetType().Name}.log"); + log = new NullDevice(); + + HashModulo modRange = HashModulo.NoMod; + long mutablePages = GetMutablePageCount(MutablePages.Two); + foreach (var arg in TestContext.CurrentContext.Test.Arguments) + { + if (arg is HashModulo cr) + { + modRange = cr; + continue; + } + if (arg is MutablePages mp) + { + mutablePages = 
GetMutablePageCount(mp); + continue; + } + } + + // Make the main log mutable region small enough that we force the readonly region to stay close to tail, causing inserts. + int pageBits = 15, memoryBits = 34; + KVSettings kvSettings = new() + { + LogDevice = log, + PageSize = 1L << pageBits, + MemorySize = 1L << memoryBits, + MutableFraction = 8.0 / (1 << (memoryBits - pageBits)), + }; + store = new(kvSettings + , StoreFunctions.Create(comparer, SpanByteRecordDisposer.Instance) + , (allocatorSettings, storeFunctions) => new(allocatorSettings, storeFunctions) + ); + + comparer = new SpanByteComparerModulo(modRange); + } + + [TearDown] + public void TearDown() + { + store?.Dispose(); + store = null; + log?.Dispose(); + log = null; + DeleteDirectory(MethodTestDir); + } + + internal class RmwSpanByteFunctions : SpanByteFunctions + { + /// + public override bool ConcurrentWriter(ref SpanByte key, ref SpanByte input, ref SpanByte src, ref SpanByte dst, ref SpanByteAndMemory output, ref UpsertInfo upsertInfo, ref RecordInfo recordInfo) + { + src.CopyTo(ref dst); + src.CopyTo(ref output, memoryPool); + return true; + } + + /// + public override bool SingleWriter(ref SpanByte key, ref SpanByte input, ref SpanByte src, ref SpanByte dst, ref SpanByteAndMemory output, ref UpsertInfo upsertInfo, WriteReason reason, ref RecordInfo recordInfo) + { + src.CopyTo(ref dst); + src.CopyTo(ref output, memoryPool); + return true; + } + + /// + public override bool CopyUpdater(ref SpanByte key, ref SpanByte input, ref SpanByte oldValue, ref SpanByte newValue, ref SpanByteAndMemory output, ref RMWInfo rmwInfo, ref RecordInfo recordInfo) + { + input.CopyTo(ref newValue); + input.CopyTo(ref output, memoryPool); + return true; + } + + /// + public override bool InPlaceUpdater(ref SpanByte key, ref SpanByte input, ref SpanByte value, ref SpanByteAndMemory output, ref RMWInfo rmwInfo, ref RecordInfo recordInfo) + { + // The default implementation of IPU simply writes input to destination, if 
there is space + base.InPlaceUpdater(ref key, ref input, ref value, ref output, ref rmwInfo, ref recordInfo); + input.CopyTo(ref output, memoryPool); + return true; + } + + /// + public override bool InitialUpdater(ref SpanByte key, ref SpanByte input, ref SpanByte value, ref SpanByteAndMemory output, ref RMWInfo rmwInfo, ref RecordInfo recordInfo) + { + Assert.Fail("For these tests, InitialUpdater should never be called"); + return false; + } + } + + unsafe void PopulateAndSetReadOnlyToTail() + { + using var session = store.NewSession>(new SpanByteFunctions()); + var bContext = session.BasicContext; + + Span keyVec = stackalloc byte[sizeof(long)]; + var key = SpanByte.FromPinnedSpan(keyVec); + + for (long ii = 0; ii < NumKeys; ii++) + { + ClassicAssert.IsTrue(BitConverter.TryWriteBytes(keyVec, ii)); + var status = bContext.Upsert(ref key, ref key); + ClassicAssert.IsTrue(status.Record.Created, status.ToString()); + } + bContext.CompletePending(true); + store.Log.ShiftReadOnlyAddress(store.Log.TailAddress, wait: true); + } + + [Test] + [Category(TsavoriteKVTestCategory)] + [Category(StressTestCategory)] + //[Repeat(300)] + public void SpanByteTailInsertMultiThreadTest([Values] HashModulo modRange, [Values(0, 1, 2, 8)] int numReadThreads, [Values(0, 1, 2, 8)] int numWriteThreads, + [Values(UpdateOp.Upsert, UpdateOp.RMW)] UpdateOp updateOp, [Values] MutablePages mutablePages) + { + if (numReadThreads == 0 && numWriteThreads == 0) + Assert.Ignore("Skipped due to 0 threads for both read and update"); + if ((numReadThreads > 2 || numWriteThreads > 2) && IsRunningAzureTests) + Assert.Ignore("Skipped because > 2 threads when IsRunningAzureTests"); + if (TestContext.CurrentContext.CurrentRepeatCount > 0) + Debug.WriteLine($"*** Current test iteration: {TestContext.CurrentContext.CurrentRepeatCount + 1} ***"); + + // Initial population so we know we can read the keys. 
+ PopulateAndSetReadOnlyToTail(); + + const int numIterations = 10; + unsafe void runReadThread(int tid) + { + using var session = store.NewSession>(new SpanByteFunctions()); + var bContext = session.BasicContext; + + Span keyVec = stackalloc byte[sizeof(long)]; + var key = SpanByte.FromPinnedSpan(keyVec); + + for (var iteration = 0; iteration < numIterations; ++iteration) + { + var numCompleted = 0; + for (var ii = 0; ii < NumKeys; ++ii) + { + SpanByteAndMemory output = default; + + ClassicAssert.IsTrue(BitConverter.TryWriteBytes(keyVec, ii)); + var status = bContext.Read(ref key, ref output); + + var numPending = ii - numCompleted; + if (status.IsPending) + ++numPending; + else + { + ++numCompleted; + + ClassicAssert.IsTrue(status.Found, $"tid {tid}, key {ii}, {status}, wasPending {false}, pt 1"); + ClassicAssert.IsNotNull(output.Memory, $"tid {tid}, key {ii}, wasPending {false}, pt 2"); + long value = BitConverter.ToInt64(output.AsReadOnlySpan()); + ClassicAssert.AreEqual(ii, value % ValueAdd, $"tid {tid}, key {ii}, wasPending {false}, pt 3"); + output.Memory.Dispose(); + } + + if (numPending > 0) + { + bContext.CompletePendingWithOutputs(out var completedOutputs, wait: true); + using (completedOutputs) + { + while (completedOutputs.Next()) + { + ++numCompleted; + + status = completedOutputs.Current.Status; + output = completedOutputs.Current.Output; + // Note: do NOT overwrite 'key' here + long keyLong = BitConverter.ToInt64(completedOutputs.Current.Key.AsReadOnlySpan()); + + ClassicAssert.AreEqual(completedOutputs.Current.RecordMetadata.Address == Constants.kInvalidAddress, status.Record.CopiedToReadCache, $"key {keyLong}: {status}"); + + ClassicAssert.IsTrue(status.Found, $"tid {tid}, key {keyLong}, {status}, wasPending {true}, pt 1"); + ClassicAssert.IsNotNull(output.Memory, $"tid {tid}, key {keyLong}, wasPending {true}, pt 2"); + long value = BitConverter.ToInt64(output.AsReadOnlySpan()); + ClassicAssert.AreEqual(keyLong, value % ValueAdd, $"tid {tid}, key 
{keyLong}, wasPending {true}, pt 3"); + output.Memory.Dispose(); + } + } + } + } + ClassicAssert.AreEqual(NumKeys, numCompleted, "numCompleted"); + } + } + + unsafe void runUpdateThread(int tid) + { + using var session = store.NewSession>(new RmwSpanByteFunctions()); + var bContext = session.BasicContext; + + Span keyVec = stackalloc byte[sizeof(long)]; + var key = SpanByte.FromPinnedSpan(keyVec); + Span inputVec = stackalloc byte[sizeof(long)]; + var input = SpanByte.FromPinnedSpan(inputVec); + + for (var iteration = 0; iteration < numIterations; ++iteration) + { + var numCompleted = 0; + for (var ii = 0; ii < NumKeys; ++ii) + { + SpanByteAndMemory output = default; + + ClassicAssert.IsTrue(BitConverter.TryWriteBytes(keyVec, ii)); + ClassicAssert.IsTrue(BitConverter.TryWriteBytes(inputVec, ii + ValueAdd)); + var status = updateOp == UpdateOp.RMW + ? bContext.RMW(ref key, ref input, ref output) + : bContext.Upsert(ref key, ref input, ref input, ref output); + + var numPending = ii - numCompleted; + if (status.IsPending) + { + ClassicAssert.AreNotEqual(UpdateOp.Upsert, updateOp, "Upsert should not go pending"); + ++numPending; + } + else + { + ++numCompleted; + if (updateOp == UpdateOp.RMW) // Upsert will not try to find records below HeadAddress, but it may find them in-memory + ClassicAssert.IsTrue(status.Found, $"tid {tid}, key {ii}, {status}"); + + long value = BitConverter.ToInt64(output.AsReadOnlySpan()); + ClassicAssert.AreEqual(ii + ValueAdd, value, $"tid {tid}, key {ii}, wasPending {false}"); + + output.Memory?.Dispose(); + } + + if (numPending > 0) + { + bContext.CompletePendingWithOutputs(out var completedOutputs, wait: true); + using (completedOutputs) + { + while (completedOutputs.Next()) + { + ++numCompleted; + + status = completedOutputs.Current.Status; + output = completedOutputs.Current.Output; + // Note: do NOT overwrite 'key' here + long keyLong = BitConverter.ToInt64(completedOutputs.Current.Key.AsReadOnlySpan()); + + if (updateOp == 
UpdateOp.RMW) // Upsert will not try to find records below HeadAddress, but it may find them in-memory + ClassicAssert.IsTrue(status.Found, $"tid {tid}, key {keyLong}, {status}"); + + long value = BitConverter.ToInt64(output.AsReadOnlySpan()); + ClassicAssert.AreEqual(keyLong + ValueAdd, value, $"tid {tid}, key {keyLong}, wasPending {true}"); + + output.Memory?.Dispose(); + } + } + } + } + ClassicAssert.AreEqual(NumKeys, numCompleted, "numCompleted"); + } + } + + List tasks = new(); // Task rather than Thread for propagation of exception. + for (int t = 1; t <= numReadThreads + numWriteThreads; t++) + { + var tid = t; + if (t <= numReadThreads) + tasks.Add(Task.Factory.StartNew(() => runReadThread(tid))); + else + tasks.Add(Task.Factory.StartNew(() => runUpdateThread(tid))); + } + Task.WaitAll(tasks.ToArray()); + } + } +} \ No newline at end of file diff --git a/libs/storage/Tsavorite/cs/test/LogRecoverReadOnlyTests.cs b/libs/storage/Tsavorite/cs/test/LogRecoverReadOnlyTests.cs index b2153fb95a..a1bac44a48 100644 --- a/libs/storage/Tsavorite/cs/test/LogRecoverReadOnlyTests.cs +++ b/libs/storage/Tsavorite/cs/test/LogRecoverReadOnlyTests.cs @@ -105,7 +105,6 @@ private async Task ReadOnlyConsumerAsync(string deviceName, bool isAsync, Cancel var value = long.Parse(Encoding.UTF8.GetString(result)); ClassicAssert.AreEqual(prevValue + 1, value); prevValue = value; - iter.CompleteUntil(nextAddress); if (prevValue == NumElements - 1) done.Release(); } diff --git a/libs/storage/Tsavorite/cs/test/LogResumeTests.cs b/libs/storage/Tsavorite/cs/test/LogResumeTests.cs index 49a201a6f5..a030915cd9 100644 --- a/libs/storage/Tsavorite/cs/test/LogResumeTests.cs +++ b/libs/storage/Tsavorite/cs/test/LogResumeTests.cs @@ -35,32 +35,32 @@ public void TearDown() [Test] [Category("TsavoriteLog")] - public async Task TsavoriteLogResumePersistedReaderSpec([Values] LogChecksumType logChecksum) + public async Task TsavoriteLogResumePersistedReaderViaCookie([Values] LogChecksumType logChecksum) 
{ CancellationToken cancellationToken = default; var input1 = new byte[] { 0, 1, 2, 3 }; var input2 = new byte[] { 4, 5, 6, 7, 8, 9, 10 }; var input3 = new byte[] { 11, 12 }; - string readerName = "abc"; - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum })) { await l.EnqueueAsync(input1, cancellationToken); await l.EnqueueAsync(input2); await l.EnqueueAsync(input3); await l.CommitAsync(); - using var originalIterator = l.Scan(0, long.MaxValue, readerName); + using var originalIterator = l.Scan(0, long.MaxValue); ClassicAssert.IsTrue(originalIterator.GetNext(out _, out _, out _, out long recoveryAddress)); - originalIterator.CompleteUntil(recoveryAddress); ClassicAssert.IsTrue(originalIterator.GetNext(out _, out _, out _, out _)); // move the reader ahead - await l.CommitAsync(); + // convert recoveryAddress to byte[] for cookie + await l.CommitAsync(cookie: BitConverter.GetBytes(recoveryAddress)); } - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum })) { - using var recoveredIterator = l.Scan(0, long.MaxValue, readerName); + var recoveredAddress = BitConverter.ToInt64(l.RecoveredCookie); + using var recoveredIterator = l.Scan(recoveredAddress, long.MaxValue); ClassicAssert.IsTrue(recoveredIterator.GetNext(out byte[] outBuf, out _, out _, out _)); ClassicAssert.True(input2.SequenceEqual(outBuf)); // we should have read in input2, not input1 or input3 } @@ -75,25 +75,24 @@ public async Task TsavoriteLogResumeViaCompleteUntilRecordAtSpec([Values] LogChe var input1 = new byte[] { 0, 
1, 2, 3 }; var input2 = new byte[] { 4, 5, 6, 7, 8, 9, 10 }; var input3 = new byte[] { 11, 12 }; - string readerName = "abc"; - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum })) { await l.EnqueueAsync(input1, cancellationToken); await l.EnqueueAsync(input2); await l.EnqueueAsync(input3); await l.CommitAsync(); - using var originalIterator = l.Scan(0, long.MaxValue, readerName); - ClassicAssert.IsTrue(originalIterator.GetNext(out _, out _, out long recordAddress, out _)); - await originalIterator.CompleteUntilRecordAtAsync(recordAddress); + using var originalIterator = l.Scan(0, long.MaxValue); + ClassicAssert.IsTrue(originalIterator.GetNext(out _, out _, out long recordAddress, out long nextAddress)); ClassicAssert.IsTrue(originalIterator.GetNext(out _, out _, out _, out _)); // move the reader ahead - await l.CommitAsync(); + await l.CommitAsync(cookie: BitConverter.GetBytes(nextAddress)); } - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum })) { - using var recoveredIterator = l.Scan(0, long.MaxValue, readerName); + var recoveredAddress = BitConverter.ToInt64(l.RecoveredCookie); + using var recoveredIterator = l.Scan(recoveredAddress, long.MaxValue); ClassicAssert.IsTrue(recoveredIterator.GetNext(out byte[] outBuf, out _, out _, out _)); ClassicAssert.True(input2.SequenceEqual(outBuf)); // we should have read in input2, not input1 or input3 } @@ -106,13 +105,10 @@ public async Task TsavoriteLogResumePersistedReader2([Values] LogChecksumType lo var input1 = new byte[] { 0, 1, 
2, 3 }; var input2 = new byte[] { 4, 5, 6, 7, 8, 9, 10 }; var input3 = new byte[] { 11, 12 }; - string readerName = "abc"; using (var logCommitManager = new DeviceLogCommitCheckpointManager(new LocalStorageNamedDeviceFactory(), new DefaultCheckpointNamingScheme(TestUtils.MethodTestDir), removeOutdated)) { - long originalCompleted; - - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) { await l.EnqueueAsync(input1); await l.CommitAsync(); @@ -121,21 +117,20 @@ public async Task TsavoriteLogResumePersistedReader2([Values] LogChecksumType lo await l.EnqueueAsync(input3); await l.CommitAsync(); - using var originalIterator = l.Scan(0, long.MaxValue, readerName); + using var originalIterator = l.Scan(0, long.MaxValue); ClassicAssert.IsTrue(originalIterator.GetNext(out _, out _, out _, out long recoveryAddress)); - originalIterator.CompleteUntil(recoveryAddress); ClassicAssert.IsTrue(originalIterator.GetNext(out _, out _, out _, out _)); // move the reader ahead - await l.CommitAsync(); - originalCompleted = originalIterator.CompletedUntilAddress; + await l.CommitAsync(cookie: BitConverter.GetBytes(recoveryAddress)); } - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) { - using var recoveredIterator = l.Scan(0, long.MaxValue, readerName); + var recoveredAddress = BitConverter.ToInt64(l.RecoveredCookie); + using var recoveredIterator 
= l.Scan(recoveredAddress, long.MaxValue); ClassicAssert.IsTrue(recoveredIterator.GetNext(out byte[] outBuf, out _, out _, out _)); // we should have read in input2, not input1 or input3 - ClassicAssert.True(input2.SequenceEqual(outBuf), $"Original: {input2[0]}, Recovered: {outBuf[0]}, Original: {originalCompleted}, Recovered: {recoveredIterator.CompletedUntilAddress}"); + ClassicAssert.True(input2.SequenceEqual(outBuf), $"Original: {input2[0]}, Recovered: {outBuf[0]}"); // TestContext.Progress.WriteLine($"Original: {originalCompleted}, Recovered: {recoveredIterator.CompletedUntilAddress}"); } @@ -149,13 +144,10 @@ public async Task TsavoriteLogResumePersistedReader3([Values] LogChecksumType lo var input1 = new byte[] { 0, 1, 2, 3 }; var input2 = new byte[] { 4, 5, 6, 7, 8, 9, 10 }; var input3 = new byte[] { 11, 12 }; - string readerName = "abcd"; using (var logCommitManager = new DeviceLogCommitCheckpointManager(new LocalStorageNamedDeviceFactory(), new DefaultCheckpointNamingScheme(TestUtils.MethodTestDir), removeOutdated)) { - long originalCompleted; - - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) { await l.EnqueueAsync(input1); await l.CommitAsync(); @@ -164,31 +156,28 @@ public async Task TsavoriteLogResumePersistedReader3([Values] LogChecksumType lo await l.EnqueueAsync(input3); await l.CommitAsync(); - using var originalIterator = l.Scan(0, l.TailAddress, readerName); + using var originalIterator = l.Scan(0, l.TailAddress); int count = 0; await foreach (var item in originalIterator.GetAsyncEnumerable()) { - if (count < 2) // we complete 1st and 2nd item read - originalIterator.CompleteUntil(item.nextAddress); - if (count < 1) // we 
commit only 1st item read - await l.CommitAsync(); + await l.CommitAsync(cookie: BitConverter.GetBytes(item.nextAddress)); count++; } - originalCompleted = originalIterator.CompletedUntilAddress; } - using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 16, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) + using (var l = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, PageSizeBits = 16, MemorySizeBits = 17, LogChecksum = logChecksum, LogCommitManager = logCommitManager })) { - using var recoveredIterator = l.Scan(0, l.TailAddress, readerName); + var recoveredAddress = BitConverter.ToInt64(l.RecoveredCookie); + using var recoveredIterator = l.Scan(recoveredAddress, l.TailAddress); int count = 0; await foreach (var item in recoveredIterator.GetAsyncEnumerable()) { if (count == 0) // resumed iterator will start at item2 - ClassicAssert.True(input2.SequenceEqual(item.entry), $"Original: {input2[0]}, Recovered: {item.entry[0]}, Original: {originalCompleted}, Recovered: {recoveredIterator.CompletedUntilAddress}"); + ClassicAssert.True(input2.SequenceEqual(item.entry), $"Original: {input2[0]}, Recovered: {item.entry[0]}"); count++; } ClassicAssert.IsTrue(count == 2); diff --git a/libs/storage/Tsavorite/cs/test/LogScanTests.cs b/libs/storage/Tsavorite/cs/test/LogScanTests.cs index bf8d7940ba..86338c96be 100644 --- a/libs/storage/Tsavorite/cs/test/LogScanTests.cs +++ b/libs/storage/Tsavorite/cs/test/LogScanTests.cs @@ -95,6 +95,11 @@ public void PopulateUncommittedLog(TsavoriteLog logUncommitted) // Add to TsavoriteLog logUncommitted.Enqueue(entry); } + + // Wait for safe tail to catch up + while (logUncommitted.SafeTailAddress < logUncommitted.TailAddress) + Thread.Yield(); + } [Test] @@ -234,7 +239,7 @@ public void ScanNoDefaultTest([Values] TestUtils.DeviceType deviceType) // Read the log - Look for the flag so know each entry is unique int currentEntry = 0; - using (var iter 
= log.Scan(0, 100_000_000, name: null, recover: true, scanBufferingMode: ScanBufferingMode.DoublePageBuffering, scanUncommitted: false)) + using (var iter = log.Scan(0, 100_000_000, recover: true, scanBufferingMode: ScanBufferingMode.DoublePageBuffering, scanUncommitted: false)) { while (iter.GetNext(out byte[] result, out _, out _)) { @@ -266,7 +271,7 @@ public void ScanByNameTest([Values] TestUtils.DeviceType deviceType) // Read the log - Look for the flag so know each entry is unique int currentEntry = 0; - using (var iter = log.Scan(0, 100_000_000, name: "TestScan", recover: true)) + using (var iter = log.Scan(0, 100_000_000, recover: true)) { while (iter.GetNext(out byte[] result, out _, out _)) { @@ -385,7 +390,7 @@ public void ScanUncommittedTest([Values] TestUtils.DeviceType deviceType) // Create log and device here (not in setup) because using DeviceType Enum which can't be used in Setup string filename = Path.Join(TestUtils.MethodTestDir, "LogScan" + deviceType.ToString() + ".log"); device = TestUtils.CreateTestDevice(deviceType, filename); - log = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, SegmentSizeBits = 22, LogCommitDir = TestUtils.MethodTestDir, AutoRefreshSafeTailAddress = true }); + log = new TsavoriteLog(new TsavoriteLogSettings { LogDevice = device, SegmentSizeBits = 22, LogCommitDir = TestUtils.MethodTestDir, SafeTailRefreshFrequencyMs = 0 }); PopulateUncommittedLog(log); // Setting scanUnCommitted to true is actual test here. 
diff --git a/libs/storage/Tsavorite/cs/test/LogTests.cs b/libs/storage/Tsavorite/cs/test/LogTests.cs index cfb61398b9..4bca495363 100644 --- a/libs/storage/Tsavorite/cs/test/LogTests.cs +++ b/libs/storage/Tsavorite/cs/test/LogTests.cs @@ -180,9 +180,9 @@ protected static async Task LogWriterAsync(TsavoriteLog log, byte[] entry) // Enter in some entries then wait on this separate thread await log.EnqueueAsync(entry); await log.EnqueueAsync(entry); - var commitTask = await log.CommitAsync(null, token: token); + var commitTask = await log.CommitAsync(null, null, token); await log.EnqueueAsync(entry); - await log.CommitAsync(commitTask, token: token); + await log.CommitAsync(commitTask, null, token); } } @@ -618,7 +618,7 @@ public async ValueTask EnqueueAndWaitForCommitAsyncBasicTest([Values] LogChecksu { LogDevice = device, PageSizeBits = 16, - MemorySizeBits = 16, + MemorySizeBits = 17, LogChecksum = logChecksum, LogCommitManager = manager, SegmentSizeBits = 22 @@ -678,7 +678,7 @@ public async ValueTask TruncateUntil2([Values] LogChecksumType logChecksum, [Val LogChecksum = logChecksum, LogCommitManager = manager, TryRecoverLatest = false, - AutoRefreshSafeTailAddress = true + SafeTailRefreshFrequencyMs = 0 }; log = IsAsync(iteratorType) ? 
await TsavoriteLog.CreateAsync(logSettings) : new TsavoriteLog(logSettings); @@ -690,6 +690,10 @@ public async ValueTask TruncateUntil2([Values] LogChecksumType logChecksum, [Val log.Enqueue(data1); } + // Wait for safe tail to catch up + while (log.SafeTailAddress < log.TailAddress) + await Task.Yield(); + ClassicAssert.AreEqual(log.TailAddress, log.SafeTailAddress); ClassicAssert.Less(log.CommittedUntilAddress, log.SafeTailAddress); @@ -735,6 +739,10 @@ public async ValueTask TruncateUntil2([Values] LogChecksumType logChecksum, [Val // Enqueue data, becomes auto-visible log.Enqueue(data1); + // Wait for safe tail to catch up + while (log.SafeTailAddress < log.TailAddress) + await Task.Yield(); + await AssertGetNext(asyncByteVectorIter, asyncMemoryOwnerIter, iter, data1, verifyAtEnd: true); log.Dispose(); @@ -753,7 +761,7 @@ public async ValueTask TruncateUntilPageStart([Values] LogChecksumType logChecks PageSizeBits = 14, LogChecksum = logChecksum, LogCommitManager = manager, - AutoRefreshSafeTailAddress = true + SafeTailRefreshFrequencyMs = 0 }); byte[] data1 = new byte[1000]; for (int i = 0; i < 100; i++) data1[i] = (byte)i; @@ -763,8 +771,11 @@ public async ValueTask TruncateUntilPageStart([Values] LogChecksumType logChecks log.Enqueue(data1); } - ClassicAssert.AreEqual(log.TailAddress, log.SafeTailAddress); + // Wait for safe tail to catch up + while (log.SafeTailAddress < log.TailAddress) + await Task.Yield(); + ClassicAssert.AreEqual(log.TailAddress, log.SafeTailAddress); ClassicAssert.Less(log.CommittedUntilAddress, log.SafeTailAddress); using (var iter = log.Scan(0, long.MaxValue, scanUncommitted: true)) @@ -809,6 +820,11 @@ public async ValueTask TruncateUntilPageStart([Values] LogChecksumType logChecks // Enqueue data, becomes auto-visible log.Enqueue(data1); + // Wait for safe tail to catch up + while (log.SafeTailAddress < log.TailAddress) + await Task.Yield(); + + await AssertGetNext(asyncByteVectorIter, asyncMemoryOwnerIter, iter, data1, 
verifyAtEnd: true); } @@ -954,7 +970,7 @@ public async ValueTask RefreshUncommittedAsyncTest([Values] IteratorType iterato PageSizeBits = 14, LogCommitManager = manager, SegmentSizeBits = 22, - AutoRefreshSafeTailAddress = true + SafeTailRefreshFrequencyMs = 0 }); byte[] data1 = new byte[1000]; for (int i = 0; i < 100; i++) data1[i] = (byte)i; @@ -964,6 +980,10 @@ public async ValueTask RefreshUncommittedAsyncTest([Values] IteratorType iterato log.Enqueue(data1); } + // Wait for safe tail to catch up + while (log.SafeTailAddress < log.TailAddress) + await Task.Yield(); + ClassicAssert.AreEqual(log.TailAddress, log.SafeTailAddress); ClassicAssert.Less(log.CommittedUntilAddress, log.SafeTailAddress); @@ -1009,6 +1029,10 @@ public async ValueTask RefreshUncommittedAsyncTest([Values] IteratorType iterato // Enqueue additional data item, becomes auto-visible log.Enqueue(data1); + // Wait for safe tail to catch up + while (log.SafeTailAddress < log.TailAddress) + await Task.Yield(); + await AssertGetNext(asyncByteVectorIter, asyncMemoryOwnerIter, iter, data1, verifyAtEnd: true); } @@ -1161,7 +1185,7 @@ public async ValueTask TsavoriteLogAsyncConsumerTestAfterDisposeIterator([Values var nextAddress = 0L; - using (var iter = log.Scan(0, long.MaxValue, "TEST")) + using (var iter = log.Scan(0, long.MaxValue)) { var count = 0; while (iter.GetNext(out _, out _, out _, out nextAddress)) count++; @@ -1181,7 +1205,7 @@ public async ValueTask TsavoriteLogAsyncConsumerTestAfterDisposeIterator([Values log.Commit(true); log.CompleteLog(true); - using (var iter = log.Scan(nextAddress, long.MaxValue, "TEST")) + using (var iter = log.Scan(nextAddress, long.MaxValue)) { var counter = new Counter(log); var consumer = new TsavoriteLogGeneralTests.TestConsumer(counter, entry); diff --git a/libs/storage/Tsavorite/cs/test/NeedCopyUpdateTests.cs b/libs/storage/Tsavorite/cs/test/NeedCopyUpdateTests.cs index 7eb1d0fa56..2eeab310e6 100644 --- 
a/libs/storage/Tsavorite/cs/test/NeedCopyUpdateTests.cs +++ b/libs/storage/Tsavorite/cs/test/NeedCopyUpdateTests.cs @@ -187,7 +187,7 @@ public void Setup() IndexSize = 1L << 13, LogDevice = log, MutableFraction = 0.1, - MemorySize = 1L << PageSizeBits, + MemorySize = 1L << (PageSizeBits + 1), PageSize = 1L << PageSizeBits }, StoreFunctions.Create(LongKeyComparer.Instance) , (allocatorSettings, storeFunctions) => new(allocatorSettings, storeFunctions) diff --git a/libs/storage/Tsavorite/cs/test/ReadCacheChainTests.cs b/libs/storage/Tsavorite/cs/test/ReadCacheChainTests.cs index 6996c00f34..adefeb6bbd 100644 --- a/libs/storage/Tsavorite/cs/test/ReadCacheChainTests.cs +++ b/libs/storage/Tsavorite/cs/test/ReadCacheChainTests.cs @@ -16,37 +16,6 @@ #pragma warning disable // Add parentheses for clarity -namespace Tsavorite.test.ReadCacheTests -{ - // Must be in a separate block so the "using StructStoreFunctions" is the first line in its namespace declaration. - internal class LongComparerModulo : IKeyComparer - { - readonly long mod; - - internal LongComparerModulo(long mod) => this.mod = mod; - - public bool Equals(ref long k1, ref long k2) => k1 == k2; - - public long GetHashCode64(ref long k) => mod == 0 ? k : k % mod; - } - - internal struct SpanByteComparerModulo : IKeyComparer - { - readonly HashModulo modRange; - - internal SpanByteComparerModulo(HashModulo mod) => modRange = mod; - - public readonly bool Equals(ref SpanByte k1, ref SpanByte k2) => SpanByteComparer.StaticEquals(ref k1, ref k2); - - // Force collisions to create a chain - public readonly long GetHashCode64(ref SpanByte k) - { - var value = SpanByteComparer.StaticGetHashCode64(ref k); - return modRange != HashModulo.NoMod ? 
value % (long)modRange : value; - } - } -} - namespace Tsavorite.test.ReadCacheTests { using LongAllocator = BlittableAllocator>>; diff --git a/libs/storage/Tsavorite/cs/test/RecoverReadOnlyTest.cs b/libs/storage/Tsavorite/cs/test/RecoverReadOnlyTest.cs index 9e57a3c35e..7c38c13cc6 100644 --- a/libs/storage/Tsavorite/cs/test/RecoverReadOnlyTest.cs +++ b/libs/storage/Tsavorite/cs/test/RecoverReadOnlyTest.cs @@ -7,7 +7,6 @@ using System.Threading; using System.Threading.Tasks; using NUnit.Framework; -using NUnit.Framework.Legacy; using Tsavorite.core; //** Note - this test is based on TsavoriteLogPubSub sample found in the samples directory. @@ -115,7 +114,6 @@ public async Task SeparateConsumerAsync(CancellationToken cancellationToken) using var iter = logReadOnly.Scan(logReadOnly.BeginAddress, long.MaxValue); await foreach (var (result, length, currentAddress, nextAddress) in iter.GetAsyncEnumerable(cancellationToken)) { - iter.CompleteUntil(nextAddress); } } diff --git a/libs/storage/Tsavorite/cs/test/ReproReadCacheTest.cs b/libs/storage/Tsavorite/cs/test/ReproReadCacheTest.cs index e0351a42bb..6881d5e9a1 100644 --- a/libs/storage/Tsavorite/cs/test/ReproReadCacheTest.cs +++ b/libs/storage/Tsavorite/cs/test/ReproReadCacheTest.cs @@ -83,11 +83,12 @@ public void Setup() } if (arg is DeviceType deviceType) { - kvSettings.LogDevice = CreateTestDevice(deviceType, filename, deleteOnClose: true); + log = CreateTestDevice(deviceType, filename, deleteOnClose: true); continue; } } - kvSettings.LogDevice ??= Devices.CreateLogDevice(filename, deleteOnClose: true); + + kvSettings.LogDevice = log ??= Devices.CreateLogDevice(filename, deleteOnClose: true); store = new(kvSettings , StoreFunctions.Create() diff --git a/libs/storage/Tsavorite/cs/test/TestUtils.cs b/libs/storage/Tsavorite/cs/test/TestUtils.cs index 6997ec34a6..751e2b75b3 100644 --- a/libs/storage/Tsavorite/cs/test/TestUtils.cs +++ b/libs/storage/Tsavorite/cs/test/TestUtils.cs @@ -10,6 +10,7 @@ using 
NUnit.Framework.Legacy; using Tsavorite.core; using Tsavorite.devices; +using static Tsavorite.test.TestUtils; namespace Tsavorite.test { @@ -269,6 +270,33 @@ internal static unsafe bool FindHashBucketEntryForKey + { + readonly long mod; + + internal LongComparerModulo(long mod) => this.mod = mod; + + public bool Equals(ref long k1, ref long k2) => k1 == k2; + + public long GetHashCode64(ref long k) => mod == 0 ? k : k % mod; + } + + internal struct SpanByteComparerModulo : IKeyComparer + { + readonly HashModulo modRange; + + internal SpanByteComparerModulo(HashModulo mod) => modRange = mod; + + public readonly bool Equals(ref SpanByte k1, ref SpanByte k2) => SpanByteComparer.StaticEquals(ref k1, ref k2); + + // Force collisions to create a chain + public readonly long GetHashCode64(ref SpanByte k) + { + var value = SpanByteComparer.StaticGetHashCode64(ref k); + return modRange != HashModulo.NoMod ? value % (long)modRange : value; + } + } + static class StaticTestUtils { internal static (Status status, TOutput output) GetSinglePendingResult( diff --git a/playground/ClusterStress/ShardedRespOnlineBench.cs b/playground/ClusterStress/ShardedRespOnlineBench.cs index ba553ee9b8..8bf1d944db 100644 --- a/playground/ClusterStress/ShardedRespOnlineBench.cs +++ b/playground/ClusterStress/ShardedRespOnlineBench.cs @@ -223,8 +223,8 @@ private void InitClients(ClusterNode[] nodes) gcs[j][i] = new GarnetClientSession( endpoint.Address.ToString(), endpoint.Port, - opts.EnableTLS ? BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null, - bufferSize: Math.Max(bufferSizeValue, opts.IntraThreadParallelism * opts.ValueLength)); + new(Math.Max(bufferSizeValue, opts.IntraThreadParallelism * opts.ValueLength)), + tlsOptions: opts.EnableTLS ? 
BenchUtils.GetTlsOptions(opts.TlsHost, opts.CertFileName, opts.CertPassword) : null); gcs[j][i].Connect(); if (auth != null) { diff --git a/playground/CommandInfoUpdater/CommandDocsUpdater.cs b/playground/CommandInfoUpdater/CommandDocsUpdater.cs new file mode 100644 index 0000000000..51c7fb7586 --- /dev/null +++ b/playground/CommandInfoUpdater/CommandDocsUpdater.cs @@ -0,0 +1,291 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using System.Collections.ObjectModel; +using System.Net; +using Garnet.server; +using Microsoft.Extensions.Logging; +using StackExchange.Redis; + +namespace CommandInfoUpdater +{ + public class CommandDocsUpdater + { + private static readonly string CommandDocsFileName = "RespCommandsDocs.json"; + private static readonly string GarnetCommandDocsJsonPath = "GarnetCommandsDocs.json"; + + /// + /// Tries to generate an updated JSON file containing Garnet's supported commands' docs + /// + /// Output directory for the updated JSON file + /// RESP server port to query commands docs + /// RESP server host to query commands docs + /// Commands to ignore + /// Updated command info data + /// Force update all commands + /// Logger + /// True if file generated successfully + public static bool TryUpdateCommandDocs(string outputDir, int respServerPort, IPAddress respServerHost, + IEnumerable ignoreCommands, IReadOnlyDictionary updatedCommandsInfo, bool force, ILogger logger) + { + logger.LogInformation("Attempting to update RESP commands docs..."); + + IReadOnlyDictionary existingCommandsDocs = + new Dictionary(); + if (!force && !RespCommandDocs.TryGetRespCommandsDocs(out existingCommandsDocs, false, logger)) + { + logger.LogError("Unable to get existing RESP commands docs."); + return false; + } + + var (commandsToAdd, commandsToRemove) = + CommonUtils.GetCommandsToAddAndRemove(existingCommandsDocs, ignoreCommands); + + if (!CommonUtils.GetUserConfirmation(commandsToAdd, commandsToRemove, logger)) + { + 
logger.LogInformation("User cancelled update operation."); + return false; + } + + if (!CommonUtils.TryGetRespCommandsData(GarnetCommandDocsJsonPath, logger, + out var garnetCommandsDocs) || + garnetCommandsDocs == null) + { + logger.LogError("Unable to read Garnet RESP commands docs from {GarnetCommandInfoJsonPath}.", + GarnetCommandDocsJsonPath); + return false; + } + + IDictionary queriedCommandsDocs = new Dictionary(); + var commandsToQuery = commandsToAdd.Keys.Select(k => k.Command) + .Where(c => updatedCommandsInfo.ContainsKey(c) && !updatedCommandsInfo[c].IsInternal).ToArray(); + if (commandsToQuery.Length > 0 && !TryGetCommandsDocs(commandsToQuery, respServerPort, respServerHost, + logger, out queriedCommandsDocs)) + { + logger.LogError("Unable to get RESP command docs from local RESP server."); + return false; + } + + var additionalCommandsDocs = new Dictionary(); + foreach (var cmd in garnetCommandsDocs.Keys.Union(queriedCommandsDocs.Keys)) + { + if (!additionalCommandsDocs.ContainsKey(cmd)) + { + var baseCommandDocs = queriedCommandsDocs.TryGetValue(cmd, out var doc) + ? doc + : garnetCommandsDocs[cmd]; + + RespCommandDocs[] subCommandsDocs; + if (garnetCommandsDocs.ContainsKey(cmd) && queriedCommandsDocs.ContainsKey(cmd)) + { + var subCommandsInfoMap = new Dictionary(); + + if (garnetCommandsDocs.TryGetValue(cmd, out var garnetCmdDocs) && garnetCmdDocs.SubCommands != null) + { + foreach (var sc in garnetCmdDocs.SubCommands) + subCommandsInfoMap.Add(sc.Name, sc); + } + + if (queriedCommandsDocs.TryGetValue(cmd, out var queriedCmdDocs) && queriedCmdDocs.SubCommands != null) + { + foreach (var sc in queriedCmdDocs.SubCommands) + { + subCommandsInfoMap.TryAdd(sc.Name, sc); + } + } + + subCommandsDocs = [.. 
subCommandsInfoMap.Values]; + } + else + { + subCommandsDocs = baseCommandDocs.SubCommands; + } + + additionalCommandsDocs.Add(cmd, new RespCommandDocs( + baseCommandDocs.Command, baseCommandDocs.Name, baseCommandDocs.Summary, baseCommandDocs.Group, + baseCommandDocs.Complexity, + baseCommandDocs.DocFlags, baseCommandDocs.ReplacedBy, baseCommandDocs.Arguments, + subCommandsDocs)); + } + } + + var updatedCommandsDocs = GetUpdatedCommandsDocs(existingCommandsDocs, commandsToAdd, commandsToRemove, + additionalCommandsDocs, updatedCommandsInfo); + + var outputPath = Path.Combine(outputDir ?? string.Empty, CommandDocsFileName); + if (!CommonUtils.TryWriteRespCommandsData(outputPath, updatedCommandsDocs, logger)) + { + logger.LogError("Unable to write RESP commands docs to path {outputPath}.", outputPath); + return false; + } + + logger.LogInformation("RESP commands docs updated successfully! Output file written to: {fullOutputPath}", + Path.GetFullPath(outputPath)); + return true; + } + + /// + /// Query RESP server to get missing commands' docs + /// + /// Command to query + /// RESP server port to query + /// RESP server host to query + /// Logger + /// Queried commands docs + /// True if succeeded + private static unsafe bool TryGetCommandsDocs(string[] commandsToQuery, int respServerPort, + IPAddress respServerHost, ILogger logger, out IDictionary commandDocs) + { + commandDocs = default; + + // If there are no commands to query, return + if (commandsToQuery.Length == 0) return true; + + var tmpCommandsDocs = new Dictionary(); + + // Get a map of supported commands to Garnet's RespCommand & ArrayCommand for the parser + var supportedCommands = new ReadOnlyDictionary( + SupportedCommand.SupportedCommandsFlattenedMap.ToDictionary(kvp => kvp.Key, + kvp => kvp.Value.RespCommand, StringComparer.OrdinalIgnoreCase)); + + var configOptions = new ConfigurationOptions() + { + EndPoints = new EndPointCollection(new List + { + new IPEndPoint(respServerHost, respServerPort) + }) 
+ }; + + using var redis = ConnectionMultiplexer.Connect(configOptions); + var db = redis.GetDatabase(0); + + var cmdArgs = new List { "DOCS" }.Union(commandsToQuery).ToArray(); + var result = db.Execute("COMMAND", cmdArgs); + var elemCount = result.Length; + for (var i = 0; i < elemCount; i += 2) + { + if (!RespCommandDocsParser.TryReadFromResp(result, i, supportedCommands, out var cmdDocs, out var cmdName) || cmdDocs == null) + { + logger.LogError("Unable to read RESP command docs from server for command {command}", + cmdName); + return false; + } + + tmpCommandsDocs.Add(cmdName, cmdDocs); + } + + commandDocs = tmpCommandsDocs; + return true; + } + + /// + /// Update the mapping of commands docs + /// + /// Existing command docs mapping + /// Commands to add + /// Commands to remove + /// Queried commands docs + /// Updated commands info + /// + private static IReadOnlyDictionary GetUpdatedCommandsDocs( + IReadOnlyDictionary existingCommandsDocs, + IDictionary commandsToAdd, + IDictionary commandsToRemove, + IDictionary queriedCommandsDocs, + IReadOnlyDictionary updatedCommandsInfo) + { + // Define updated commands as commands to add unified with commands to remove + var updatedCommands = + new HashSet(commandsToAdd.Keys.Union(commandsToRemove.Keys).Select(c => c.Command)); + + // Preserve command docs for all commands that have not been updated + var updatedCommandsDocs = existingCommandsDocs + .Where(existingCommand => !updatedCommands.Contains(existingCommand.Key)) + .ToDictionary(existingCommand => existingCommand.Key, existingCommand => existingCommand.Value); + + // Update commands docs with commands to remove + foreach (var command in commandsToRemove.Where(kvp => !kvp.Value).Select(kvp => kvp.Key)) + { + // Determine updated sub-commands by subtracting from existing sub-commands + var existingSubCommands = existingCommandsDocs[command.Command].SubCommands == null + ? 
null + : existingCommandsDocs[command.Command].SubCommands.Select(sc => sc.Name).ToArray(); + var remainingSubCommands = existingSubCommands == null ? null : + command.SubCommands == null ? existingSubCommands : + existingSubCommands.Except(command.SubCommands.Keys).ToArray(); + + // Create updated command docs based on existing command + var existingCommandDoc = existingCommandsDocs[command.Command]; + var updatedCommandDoc = new RespCommandDocs( + existingCommandDoc.Command, + existingCommandDoc.Name, + existingCommandDoc.Summary, + existingCommandDoc.Group, + existingCommandDoc.Complexity, + existingCommandDoc.DocFlags, + existingCommandDoc.ReplacedBy, + existingCommandDoc.Arguments, + remainingSubCommands == null || remainingSubCommands.Length == 0 + ? null + : existingCommandDoc.SubCommands.Where(sc => remainingSubCommands.Contains(sc.Name)).ToArray()); + + updatedCommandsDocs.Add(updatedCommandDoc.Name, updatedCommandDoc); + } + + // Update commands docs with commands to add + foreach (var command in commandsToAdd.Keys) + { + RespCommandDocs baseCommandDocs; + List updatedSubCommandsDocs; + // If parent command already exists + if (existingCommandsDocs.ContainsKey(command.Command)) + { + updatedSubCommandsDocs = existingCommandsDocs[command.Command].SubCommands == null + ? new List() + : [.. existingCommandsDocs[command.Command].SubCommands]; + + // Add sub-commands with updated queried command docs + foreach (var subCommandToAdd in command.SubCommands!) 
+ { + updatedSubCommandsDocs.Add(queriedCommandsDocs[command.Command].SubCommands + .First(sc => sc.Name == subCommandToAdd.Key)); + } + + // Set base command as existing sub-command + baseCommandDocs = existingCommandsDocs[command.Command]; + } + // If parent command does not exist + else + { + if (!queriedCommandsDocs.ContainsKey(command.Command) && + updatedCommandsInfo.ContainsKey(command.Command) && + updatedCommandsInfo[command.Command].IsInternal) continue; + + // Set base command as queried command + baseCommandDocs = queriedCommandsDocs[command.Command]; + + // Update sub-commands to contain supported sub-commands only + updatedSubCommandsDocs = command.SubCommands == null + ? null + : baseCommandDocs.SubCommands.Where(sc => command.SubCommands.Keys.Contains(sc.Name)).ToList(); + } + + // Create updated command docs based on base command & updated sub-commands + var updatedCommandDocs = new RespCommandDocs( + baseCommandDocs.Command, + baseCommandDocs.Name, + baseCommandDocs.Summary, + baseCommandDocs.Group, + baseCommandDocs.Complexity, + baseCommandDocs.DocFlags, + baseCommandDocs.ReplacedBy, + baseCommandDocs.Arguments, + updatedSubCommandsDocs?.ToArray()); + + updatedCommandsDocs.Add(updatedCommandDocs.Name, updatedCommandDocs); + } + + return updatedCommandsDocs; + } + } +} \ No newline at end of file diff --git a/playground/CommandInfoUpdater/CommandInfoUpdater.cs b/playground/CommandInfoUpdater/CommandInfoUpdater.cs index b4900f4cd6..60efda2511 100644 --- a/playground/CommandInfoUpdater/CommandInfoUpdater.cs +++ b/playground/CommandInfoUpdater/CommandInfoUpdater.cs @@ -3,7 +3,6 @@ using System.Collections.ObjectModel; using System.Net; -using System.Reflection; using System.Runtime.CompilerServices; using Garnet.common; using Garnet.server; @@ -16,22 +15,25 @@ namespace CommandInfoUpdater /// public class CommandInfoUpdater { + private static readonly string CommandInfoFileName = "RespCommandsInfo.json"; private static readonly string 
GarnetCommandInfoJsonPath = "GarnetCommandsInfo.json"; /// /// Tries to generate an updated JSON file containing Garnet's supported commands' info /// - /// Output path for the updated JSON file + /// Output directory for the updated JSON file /// RESP server port to query commands info /// RESP server host to query commands info /// Commands to ignore /// Force update all commands /// Logger + /// Updated command info data /// True if file generated successfully - public static bool TryUpdateCommandInfo(string outputPath, int respServerPort, IPAddress respServerHost, - IEnumerable ignoreCommands, bool force, ILogger logger) + public static bool TryUpdateCommandInfo(string outputDir, int respServerPort, IPAddress respServerHost, + IEnumerable ignoreCommands, bool force, ILogger logger, out IReadOnlyDictionary updatedCommandsInfo) { logger.LogInformation("Attempting to update RESP commands info..."); + updatedCommandsInfo = default; IReadOnlyDictionary existingCommandsInfo = new Dictionary(); @@ -42,15 +44,15 @@ public static bool TryUpdateCommandInfo(string outputPath, int respServerPort, I } var (commandsToAdd, commandsToRemove) = - GetCommandsToAddAndRemove(existingCommandsInfo, ignoreCommands); + CommonUtils.GetCommandsToAddAndRemove(existingCommandsInfo, ignoreCommands); - if (!GetUserConfirmation(commandsToAdd, commandsToRemove, logger)) + if (!CommonUtils.GetUserConfirmation(commandsToAdd, commandsToRemove, logger)) { logger.LogInformation("User cancelled update operation."); return false; } - if (!TryGetRespCommandsInfo(GarnetCommandInfoJsonPath, logger, out var garnetCommandsInfo) || + if (!CommonUtils.TryGetRespCommandsData(GarnetCommandInfoJsonPath, logger, out var garnetCommandsInfo) || garnetCommandsInfo == null) { logger.LogError("Unable to read Garnet RESP commands info from {GarnetCommandInfoJsonPath}.", GarnetCommandInfoJsonPath); @@ -58,7 +60,9 @@ public static bool TryUpdateCommandInfo(string outputPath, int respServerPort, I } IDictionary 
queriedCommandsInfo = new Dictionary(); - var commandsToQuery = commandsToAdd.Keys.Select(k => k.Command).ToArray(); + var commandsToQuery = commandsToAdd.Keys.Select(k => k.Command) + .Where(c => !garnetCommandsInfo.ContainsKey(c) || !garnetCommandsInfo[c].IsInternal).ToArray(); + if (commandsToQuery.Length > 0 && !TryGetCommandsInfo(commandsToQuery, respServerPort, respServerHost, logger, out queriedCommandsInfo)) { @@ -71,13 +75,40 @@ public static bool TryUpdateCommandInfo(string outputPath, int respServerPort, I { if (!additionalCommandsInfo.ContainsKey(cmd)) { - var baseCommandInfo = queriedCommandsInfo.ContainsKey(cmd) - ? queriedCommandsInfo[cmd] - : garnetCommandsInfo[cmd]; + var baseCommandInfo = queriedCommandsInfo.TryGetValue(cmd, out var cmdInfo) + ? cmdInfo : garnetCommandsInfo[cmd]; + + RespCommandsInfo[] subCommandsInfo; + if (garnetCommandsInfo.ContainsKey(cmd) && queriedCommandsInfo.ContainsKey(cmd)) + { + var subCommandsInfoMap = new Dictionary(); + + if (garnetCommandsInfo.TryGetValue(cmd, out var garnetCmdInfo) && garnetCmdInfo.SubCommands != null) + { + foreach (var sc in garnetCmdInfo.SubCommands) + subCommandsInfoMap.Add(sc.Name, sc); + } + + if (queriedCommandsInfo.TryGetValue(cmd, out var queriedCmdInfo) && queriedCmdInfo.SubCommands != null) + { + foreach (var sc in queriedCmdInfo.SubCommands) + { + subCommandsInfoMap.TryAdd(sc.Name, sc); + } + } + + subCommandsInfo = [.. 
subCommandsInfoMap.Values]; + } + else + { + subCommandsInfo = baseCommandInfo.SubCommands; + } + additionalCommandsInfo.Add(cmd, new RespCommandsInfo() { Command = baseCommandInfo.Command, Name = baseCommandInfo.Name, + IsInternal = baseCommandInfo.IsInternal, Arity = baseCommandInfo.Arity, Flags = baseCommandInfo.Flags, FirstKey = baseCommandInfo.FirstKey, @@ -86,197 +117,26 @@ public static bool TryUpdateCommandInfo(string outputPath, int respServerPort, I AclCategories = baseCommandInfo.AclCategories, Tips = baseCommandInfo.Tips, KeySpecifications = baseCommandInfo.KeySpecifications, - SubCommands = queriedCommandsInfo.ContainsKey(cmd) && garnetCommandsInfo.ContainsKey(cmd) ? - queriedCommandsInfo[cmd].SubCommands.Union(garnetCommandsInfo[cmd].SubCommands).ToArray() : - baseCommandInfo.SubCommands + SubCommands = subCommandsInfo }); } } - var updatedCommandsInfo = GetUpdatedCommandsInfo(existingCommandsInfo, commandsToAdd, commandsToRemove, + updatedCommandsInfo = GetUpdatedCommandsInfo(existingCommandsInfo, commandsToAdd, commandsToRemove, additionalCommandsInfo); - if (!TryWriteRespCommandsInfo(outputPath, updatedCommandsInfo, logger)) + var outputPath = Path.Combine(outputDir ?? string.Empty, CommandInfoFileName); + if (!CommonUtils.TryWriteRespCommandsData(outputPath, updatedCommandsInfo, logger)) { logger.LogError("Unable to write RESP commands info to path {outputPath}.", outputPath); return false; } logger.LogInformation("RESP commands info updated successfully! 
Output file written to: {fullOutputPath}", Path.GetFullPath(outputPath)); - return true; - } - /// - /// Try to parse JSON file containing commands info - /// - /// Path to JSON file - /// Logger - /// Dictionary mapping command name to RespCommandsInfo - /// True if deserialization was successful - private static bool TryGetRespCommandsInfo(string resourcePath, ILogger logger, out IReadOnlyDictionary commandsInfo) - { - commandsInfo = default; - - var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.EmbeddedResource, null, Assembly.GetExecutingAssembly()); - var commandsInfoProvider = RespCommandsInfoProviderFactory.GetRespCommandsInfoProvider(); - - var importSucceeded = commandsInfoProvider.TryImportRespCommandsInfo(resourcePath, - streamProvider, out var tmpCommandsInfo, logger); - - if (!importSucceeded) return false; - - commandsInfo = tmpCommandsInfo; return true; } - /// - /// Compare existing commands to supported commands map to find added / removed commands / sub-commands - /// - /// Existing command names mapped to current command info - /// Commands to ignore - /// Commands to add and commands to remove mapped to a boolean determining if parent command should be added / removed - private static (IDictionary, IDictionary) - GetCommandsToAddAndRemove(IReadOnlyDictionary existingCommandsInfo, - IEnumerable ignoreCommands) - { - var commandsToAdd = new Dictionary(); - var commandsToRemove = new Dictionary(); - var commandsToIgnore = ignoreCommands != null ? 
new HashSet(ignoreCommands) : null; - - // Supported commands - var supportedCommands = SupportedCommand.SupportedCommandsMap; - - // Find commands / sub-commands to add - foreach (var supportedCommand in supportedCommands.Values) - { - // Ignore command if in commands to ignore - if (commandsToIgnore != null && commandsToIgnore.Contains(supportedCommand.Command)) continue; - - // If existing commands do not contain parent command, add it and indicate parent command should be added - if (!existingCommandsInfo.ContainsKey(supportedCommand.Command)) - { - commandsToAdd.Add(supportedCommand, true); - continue; - } - - // If existing commands contain parent command and no sub-commands are indicated in supported commands, no sub-commands to add - if (supportedCommand.SubCommands == null) continue; - - string[] subCommandsToAdd; - // If existing commands contain parent command and have no sub-commands, set sub-commands to add as supported command's sub-commands - if (existingCommandsInfo[supportedCommand.Command].SubCommands == null) - { - subCommandsToAdd = [.. 
supportedCommand.SubCommands]; - } - // Set sub-commands to add as the difference between existing sub-commands and supported command's sub-commands - else - { - var existingSubCommands = new HashSet(existingCommandsInfo[supportedCommand.Command] - .SubCommands - .Select(sc => sc.Name)); - subCommandsToAdd = supportedCommand.SubCommands - .Where(subCommand => !existingSubCommands.Contains(subCommand)).Select(sc => sc).ToArray(); - } - - // If there are sub-commands to add, add a new supported command with the sub-commands to add - // Indicate that parent command should not be added - if (subCommandsToAdd.Length > 0) - { - commandsToAdd.Add( - new SupportedCommand(supportedCommand.Command, supportedCommand.RespCommand, subCommandsToAdd), false); - } - } - - // Find commands / sub-commands to remove - foreach (var existingCommand in existingCommandsInfo) - { - var existingSubCommands = existingCommand.Value.SubCommands; - - // If supported commands do not contain existing parent command, add it to the list and indicate parent command should be removed - if (!supportedCommands.ContainsKey(existingCommand.Key)) - { - commandsToRemove.Add(new SupportedCommand(existingCommand.Key), true); - continue; - } - - // If supported commands contain existing parent command and no sub-commands are indicated in existing commands, no sub-commands to remove - if (existingSubCommands == null) continue; - - // Set sub-commands to remove as the difference between supported sub-commands and existing command's sub-commands - var subCommandsToRemove = (supportedCommands[existingCommand.Key].SubCommands == null - ? 
existingSubCommands - : existingSubCommands.Where(sc => - !supportedCommands[existingCommand.Key].SubCommands!.Contains(sc.Name))) - .Select(sc => sc.Name) - .ToArray(); - - // If there are sub-commands to remove, add a new supported command with the sub-commands to remove - // Indicate that parent command should not be removed - if (subCommandsToRemove.Length > 0) - { - commandsToRemove.Add( - new SupportedCommand(existingCommand.Key, existingCommand.Value.Command, subCommandsToRemove), false); - } - } - - return (commandsToAdd, commandsToRemove); - } - - /// - /// Indicates to the user which commands and sub-commands are added / removed and get their confirmation to proceed - /// - /// Commands to add - /// Commands to remove - /// Logger - /// True if user wishes to continue, false otherwise - private static bool GetUserConfirmation(IDictionary commandsToAdd, IDictionary commandsToRemove, - ILogger logger) - { - var logCommandsToAdd = commandsToAdd.Where(kvp => kvp.Value).Select(c => c.Key.Command).ToList(); - var logSubCommandsToAdd = commandsToAdd.Where(c => c.Key.SubCommands != null) - .SelectMany(c => c.Key.SubCommands!).ToList(); - var logCommandsToRemove = commandsToRemove.Where(kvp => kvp.Value).Select(c => c.Key.Command).ToList(); - var logSubCommandsToRemove = commandsToRemove.Where(c => c.Key.SubCommands != null) - .SelectMany(c => c.Key.SubCommands!).ToList(); - - logger.LogInformation("Found {logCommandsToAddCount} commands to add and {logSubCommandsToAddCount} sub-commands to add.", logCommandsToAdd.Count, logSubCommandsToAdd.Count); - if (logCommandsToAdd.Count > 0) - logger.LogInformation("Commands to add: {commands}", string.Join(", ", logCommandsToAdd)); - if (logSubCommandsToAdd.Count > 0) - logger.LogInformation("Sub-Commands to add: {commands}", string.Join(", ", logSubCommandsToAdd)); - logger.LogInformation("Found {logCommandsToRemoveCount} commands to remove and {logSubCommandsToRemoveCount} sub-commands to commandsToRemove.", 
logCommandsToRemove.Count, logSubCommandsToRemove.Count); - if (logCommandsToRemove.Count > 0) - logger.LogInformation("Commands to remove: {commands}", string.Join(", ", logCommandsToRemove)); - if (logSubCommandsToRemove.Count > 0) - logger.LogInformation("Sub-Commands to remove: {commands}", string.Join(", ", logSubCommandsToRemove)); - - if (logCommandsToAdd.Count == 0 && logSubCommandsToAdd.Count == 0 && logCommandsToRemove.Count == 0 && - logSubCommandsToRemove.Count == 0) - { - logger.LogInformation("No commands to update."); - return false; - } - - logger.LogCritical("Would you like to continue? (Y/N)"); - var inputChar = Console.ReadKey(); - while (true) - { - switch (inputChar.KeyChar) - { - case 'Y': - case 'y': - return true; - case 'N': - case 'n': - return false; - default: - logger.LogCritical("Illegal input. Would you like to continue? (Y/N)"); - inputChar = Console.ReadKey(); - break; - } - } - } - /// /// Query RESP server to get missing commands' info /// @@ -311,7 +171,7 @@ private static unsafe bool TryGetCommandsInfo(string[] commandsToQuery, int resp // Get a map of supported commands to Garnet's RespCommand & ArrayCommand for the parser var supportedCommands = new ReadOnlyDictionary( - SupportedCommand.SupportedCommandsMap.ToDictionary(kvp => kvp.Key, + SupportedCommand.SupportedCommandsFlattenedMap.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.RespCommand, StringComparer.OrdinalIgnoreCase)); // Parse the response @@ -330,14 +190,14 @@ private static unsafe bool TryGetCommandsInfo(string[] commandsToQuery, int resp // Parse each command's command info for (var cmdIdx = 0; cmdIdx < cmdCount; cmdIdx++) { - if (!RespCommandInfoParser.TryReadFromResp(ref ptr, end, supportedCommands, out var command) || - command == null) + if (!RespCommandInfoParser.TryReadFromResp(ref ptr, end, supportedCommands, out var command)) { logger.LogError("Unable to read RESP command info from server for command {command}", commandsToQuery[cmdIdx]); return false; } - 
tmpCommandsInfo.Add(command.Name, command); + if (command != null) + tmpCommandsInfo.Add(command.Name, command); } } @@ -378,7 +238,7 @@ private static IReadOnlyDictionary GetUpdatedCommandsI : existingCommandsInfo[command.Command].SubCommands.Select(sc => sc.Name).ToArray(); var remainingSubCommands = existingSubCommands == null ? null : command.SubCommands == null ? existingSubCommands : - existingSubCommands.Except(command.SubCommands).ToArray(); + existingSubCommands.Except(command.SubCommands.Keys).ToArray(); // Create updated command info based on existing command var existingCommand = existingCommandsInfo[command.Command]; @@ -386,6 +246,7 @@ private static IReadOnlyDictionary GetUpdatedCommandsI { Command = existingCommand.Command, Name = existingCommand.Name, + IsInternal = existingCommand.IsInternal, Arity = existingCommand.Arity, Flags = existingCommand.Flags, FirstKey = existingCommand.FirstKey, @@ -418,7 +279,7 @@ private static IReadOnlyDictionary GetUpdatedCommandsI foreach (var subCommandToAdd in command.SubCommands!) { updatedSubCommands.Add(queriedCommandsInfo[command.Command].SubCommands - .First(sc => sc.Name == subCommandToAdd)); + .First(sc => sc.Name == subCommandToAdd.Key)); } // Set base command as existing sub-command @@ -433,7 +294,7 @@ private static IReadOnlyDictionary GetUpdatedCommandsI // Update sub-commands to contain supported sub-commands only updatedSubCommands = command.SubCommands == null ? 
null - : baseCommand.SubCommands.Where(sc => command.SubCommands.Contains(sc.Name)).ToList(); + : baseCommand.SubCommands.Where(sc => command.SubCommands.ContainsKey(sc.Name)).ToList(); } // Create updated command info based on base command & updated sub-commands @@ -441,6 +302,7 @@ private static IReadOnlyDictionary GetUpdatedCommandsI { Command = baseCommand.Command, Name = baseCommand.Name, + IsInternal = baseCommand.IsInternal, Arity = baseCommand.Arity, Flags = baseCommand.Flags, FirstKey = baseCommand.FirstKey, @@ -457,26 +319,5 @@ private static IReadOnlyDictionary GetUpdatedCommandsI return updatedCommandsInfo; } - - /// - /// Try to serialize updated commands info to JSON file - /// - /// Output path for JSON file - /// Commands info to serialize - /// Logger - /// True if file written successfully - private static bool TryWriteRespCommandsInfo(string outputPath, - IReadOnlyDictionary commandsInfo, ILogger logger) - { - var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.Local); - var commandsInfoProvider = RespCommandsInfoProviderFactory.GetRespCommandsInfoProvider(); - - var exportSucceeded = commandsInfoProvider.TryExportRespCommandsInfo(outputPath, - streamProvider, commandsInfo, logger); - - if (!exportSucceeded) return false; - - return true; - } } } \ No newline at end of file diff --git a/playground/CommandInfoUpdater/CommandInfoUpdater.csproj b/playground/CommandInfoUpdater/CommandInfoUpdater.csproj index db68765561..41ca9f6678 100644 --- a/playground/CommandInfoUpdater/CommandInfoUpdater.csproj +++ b/playground/CommandInfoUpdater/CommandInfoUpdater.csproj @@ -8,16 +8,19 @@ + + + diff --git a/playground/CommandInfoUpdater/CommonUtils.cs b/playground/CommandInfoUpdater/CommonUtils.cs new file mode 100644 index 0000000000..63503c8bb3 --- /dev/null +++ b/playground/CommandInfoUpdater/CommonUtils.cs @@ -0,0 +1,211 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +using System.Reflection; +using Garnet.common; +using Garnet.server; +using Microsoft.Extensions.Logging; + +namespace CommandInfoUpdater +{ + public class CommonUtils + { + /// + /// Try to parse JSON file containing commands data + /// + /// Path to JSON file + /// Logger + /// Dictionary mapping command name to data + /// True if deserialization was successful + internal static bool TryGetRespCommandsData(string resourcePath, ILogger logger, + out IReadOnlyDictionary commandsData) + where TData : class, IRespCommandData + { + commandsData = default; + + var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.EmbeddedResource, null, + Assembly.GetExecutingAssembly()); + var commandsInfoProvider = RespCommandsDataProviderFactory.GetRespCommandsDataProvider(); + + var importSucceeded = commandsInfoProvider.TryImportRespCommandsData(resourcePath, + streamProvider, out var tmpCommandsData, logger); + + if (!importSucceeded) return false; + + commandsData = tmpCommandsData; + return true; + } + + /// + /// Try to serialize updated commands info to JSON file + /// + /// Output path for JSON file + /// Commands info to serialize + /// Logger + /// True if file written successfully + internal static bool TryWriteRespCommandsData(string outputPath, + IReadOnlyDictionary commandsData, ILogger logger) where TData : class, IRespCommandData + { + if (File.Exists(outputPath)) + File.Delete(outputPath); + + var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.Local); + var commandsInfoProvider = RespCommandsDataProviderFactory.GetRespCommandsDataProvider(); + + var exportSucceeded = commandsInfoProvider.TryExportRespCommandsData(outputPath, + streamProvider, commandsData, logger); + + if (!exportSucceeded) return false; + + return true; + } + + /// + /// Compare existing commands to supported commands map to find added / removed commands / sub-commands + /// + /// Existing command names mapped to current command info + /// 
Commands to ignore + /// Commands to add and commands to remove mapped to a boolean determining if parent command should be added / removed + internal static (IDictionary, IDictionary) + GetCommandsToAddAndRemove(IReadOnlyDictionary existingCommandsInfo, + IEnumerable ignoreCommands) where TData : class, IRespCommandData + { + var commandsToAdd = new Dictionary(); + var commandsToRemove = new Dictionary(); + var commandsToIgnore = ignoreCommands != null ? new HashSet(ignoreCommands) : null; + + // Supported commands + var supportedCommands = SupportedCommand.SupportedCommandsMap; + + // Find commands / sub-commands to add + foreach (var supportedCommand in supportedCommands.Values) + { + // Ignore command if in commands to ignore + if (commandsToIgnore != null && commandsToIgnore.Contains(supportedCommand.Command)) continue; + + // If existing commands do not contain parent command, add it and indicate parent command should be added + if (!existingCommandsInfo.ContainsKey(supportedCommand.Command)) + { + commandsToAdd.Add(supportedCommand, true); + continue; + } + + // If existing commands contain parent command and no sub-commands are indicated in supported commands, no sub-commands to add + if (supportedCommand.SubCommands == null) continue; + + SupportedCommand[] subCommandsToAdd; + // If existing commands contain parent command and have no sub-commands, set sub-commands to add as supported command's sub-commands + if (existingCommandsInfo[supportedCommand.Command].SubCommands == null) + { + subCommandsToAdd = [.. 
supportedCommand.SubCommands.Values]; + } + // Set sub-commands to add as the difference between existing sub-commands and supported command's sub-commands + else + { + var existingSubCommands = new HashSet(existingCommandsInfo[supportedCommand.Command] + .SubCommands + .Select(sc => sc.Name)); + subCommandsToAdd = supportedCommand.SubCommands + .Where(subCommand => !existingSubCommands.Contains(subCommand.Key)).Select(sc => sc.Value).ToArray(); + } + + // If there are sub-commands to add, add a new supported command with the sub-commands to add + // Indicate that parent command should not be added + if (subCommandsToAdd.Length > 0) + { + commandsToAdd.Add( + new SupportedCommand(supportedCommand.Command, supportedCommand.RespCommand, subCommandsToAdd), false); + } + } + + // Find commands / sub-commands to remove + foreach (var existingCommand in existingCommandsInfo) + { + var existingSubCommands = existingCommand.Value.SubCommands; + + // If supported commands do not contain existing parent command, add it to the list and indicate parent command should be removed + if (!supportedCommands.ContainsKey(existingCommand.Key)) + { + commandsToRemove.Add(new SupportedCommand(existingCommand.Key), true); + continue; + } + + // If supported commands contain existing parent command and no sub-commands are indicated in existing commands, no sub-commands to remove + if (existingSubCommands == null) continue; + + // Set sub-commands to remove as the difference between supported sub-commands and existing command's sub-commands + var subCommandsToRemove = (supportedCommands[existingCommand.Key].SubCommands == null + ? 
existingSubCommands + : existingSubCommands.Where(sc => + !supportedCommands[existingCommand.Key].SubCommands!.ContainsKey(sc.Name))) + .Select(sc => new SupportedCommand(sc.Name)) + .ToArray(); + + // If there are sub-commands to remove, add a new supported command with the sub-commands to remove + // Indicate that parent command should not be removed + if (subCommandsToRemove.Length > 0) + { + commandsToRemove.Add( + new SupportedCommand(existingCommand.Key, existingCommand.Value.Command, subCommandsToRemove), false); + } + } + + return (commandsToAdd, commandsToRemove); + } + + /// + /// Indicates to the user which commands and sub-commands are added / removed and get their confirmation to proceed + /// + /// Commands to add + /// Commands to remove + /// Logger + /// True if user wishes to continue, false otherwise + internal static bool GetUserConfirmation(IDictionary commandsToAdd, IDictionary commandsToRemove, + ILogger logger) + { + var logCommandsToAdd = commandsToAdd.Where(kvp => kvp.Value).Select(c => c.Key.Command).ToList(); + var logSubCommandsToAdd = commandsToAdd.Where(c => c.Key.SubCommands != null) + .SelectMany(c => c.Key.SubCommands!).Select(c => c.Key).ToList(); + var logCommandsToRemove = commandsToRemove.Where(kvp => kvp.Value).Select(c => c.Key.Command).ToList(); + var logSubCommandsToRemove = commandsToRemove.Where(c => c.Key.SubCommands != null) + .SelectMany(c => c.Key.SubCommands!).Select(c => c.Key).ToList(); + + logger.LogInformation("Found {logCommandsToAddCount} commands to add and {logSubCommandsToAddCount} sub-commands to add.", logCommandsToAdd.Count, logSubCommandsToAdd.Count); + if (logCommandsToAdd.Count > 0) + logger.LogInformation("Commands to add: {commands}", string.Join(", ", logCommandsToAdd)); + if (logSubCommandsToAdd.Count > 0) + logger.LogInformation("Sub-Commands to add: {commands}", string.Join(", ", logSubCommandsToAdd)); + logger.LogInformation("Found {logCommandsToRemoveCount} commands to remove and 
{logSubCommandsToRemoveCount} sub-commands to commandsToRemove.", logCommandsToRemove.Count, logSubCommandsToRemove.Count); + if (logCommandsToRemove.Count > 0) + logger.LogInformation("Commands to remove: {commands}", string.Join(", ", logCommandsToRemove)); + if (logSubCommandsToRemove.Count > 0) + logger.LogInformation("Sub-Commands to remove: {commands}", string.Join(", ", logSubCommandsToRemove)); + + if (logCommandsToAdd.Count == 0 && logSubCommandsToAdd.Count == 0 && logCommandsToRemove.Count == 0 && + logSubCommandsToRemove.Count == 0) + { + logger.LogInformation("No commands to update."); + return false; + } + + logger.LogCritical("Would you like to continue? (Y/N)"); + var inputChar = Console.ReadKey(); + while (true) + { + switch (inputChar.KeyChar) + { + case 'Y': + case 'y': + return true; + case 'N': + case 'n': + return false; + default: + logger.LogCritical("Illegal input. Would you like to continue? (Y/N)"); + inputChar = Console.ReadKey(); + break; + } + } + } + } +} \ No newline at end of file diff --git a/playground/CommandInfoUpdater/GarnetCommandsDocs.json b/playground/CommandInfoUpdater/GarnetCommandsDocs.json new file mode 100644 index 0000000000..928d0e0bc7 --- /dev/null +++ b/playground/CommandInfoUpdater/GarnetCommandsDocs.json @@ -0,0 +1,360 @@ +[ + { + "Command": "COMMITAOF", + "Name": "COMMITAOF", + "Group": "Server", + "Summary": "Commit to append-only file." + }, + { + "Command": "COSCAN", + "Name": "COSCAN", + "Group": "Generic", + "Summary": "Iterates over members of a collection object.", + "Complexity": "O(1) for every call. O(N) for a complete iteration, including enough command calls for the cursor to return back to 0. 
N is the number of elements inside the collection.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CURSOR", + "DisplayText": "cursor", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PATTERN", + "DisplayText": "pattern", + "Type": "Pattern", + "Token": "MATCH", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "COUNT", + "DisplayText": "count", + "Type": "Integer", + "Token": "COUNT", + "ArgumentFlags": "Optional" + } + ] + }, + { + "Command": "FORCEGC", + "Name": "FORCEGC", + "Summary": "Forces garbage collection.", + "Group": "Server" + }, + { + "Command": "SECONDARYOF", + "Name": "SECONDARYOF", + "Summary": "Configures a server as secondary of another, or promotes it to a primary.", + "Group": "Server", + "Complexity": "O(1)", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ARGS", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "HOST-PORT", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "HOST", + "DisplayText": "host", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PORT", + "DisplayText": "port", + "Type": "Integer" + } + ] + }, + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "NO-ONE", + "Type": "Block", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NO", + "DisplayText": "no", + "Type": "PureToken", + "Token": "NO" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ONE", + "DisplayText": "one", + "Type": "PureToken", + "Token": "ONE" + } + ] + } + ] + } + ] + }, + { + "Command": "REGISTERCS", + "Name": "REGISTERCS", + "Group": 
"Server", + "Summary": "Registers custom C# commands in Garnet.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CMD", + "Type": "Block", + "ArgumentFlags": "Multiple", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "CMDTYPE", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "READ", + "DisplayText": "read", + "Type": "PureToken", + "Token": "READ" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "RMW", + "DisplayText": "rmw", + "Type": "PureToken", + "Token": "RMW" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TXN", + "DisplayText": "txn", + "Type": "PureToken", + "Token": "TXN" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NAME", + "Summary": "Name of the command to register", + "DisplayText": "cmdName", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "NUMPARAMS", + "Summary": "Numer of parameters of the command to register", + "DisplayText": "numParams", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "CLASSNAME", + "Summary": "Name of class defining the command to register", + "DisplayText": "className", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "EXPTICKS", + "Summary": "Expiry of the command to register (in ticks)", + "DisplayText": "expTicks", + "Type": "Integer", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OBJCMDNAME", + "Summary": "The object command name, if applicable", + "DisplayText": "objCmdName", + "Type": "String", + "ArgumentFlags": "Optional" + } + ] + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "INFO", + "Summary": "Path to JSON-serialized command info for registered commands", + "DisplayText": "infoPath", + "Type": "String", + 
"Token": "INFO", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "DOCS", + "Summary": "Path to JSON-serialized command docs for registered commands", + "DisplayText": "docsPath", + "Type": "String", + "Token": "DOCS", + "ArgumentFlags": "Optional" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SRC", + "Summary": "Source paths containing classes defining command to register", + "DisplayText": "srcPath", + "Type": "String", + "Token": "SRC", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "RUNTXP", + "Name": "RUNTXP", + "Group": "Transactions", + "Summary": "Run registered transaction in Garnet.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TXNID", + "Summary": "Registered transaction ID", + "DisplayText": "txnId", + "Type": "Integer" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "TXNPARAM", + "Summary": "Transaction parameter", + "DisplayText": "param", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "WATCH", + "Name": "WATCH", + "Group": "Transactions", + "Summary": "Monitors changes to keys to determine the execution of a transaction.", + "Complexity": "O(1) for every key.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ], + "SubCommands": [ + { + "Command": "WATCH_MS", + "Name": "WATCH|MS", + "Group": "Transactions", + "Summary": "Monitors changes to keys in main store to determine the execution of a transaction.", + "Complexity": "O(1) for every key.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "WATCH_OS", + "Name": "WATCH|OS", + "Group": "Transactions", + "Summary": "Monitors 
changes to keys in object store to determine the execution of a transaction.", + "Complexity": "O(1) for every key.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "ArgumentFlags": "Multiple", + "KeySpecIndex": 0 + } + ] + } + ] + }, + { + "Command": "ASYNC", + "Name": "ASYNC", + "Group": "Generic", + "Summary": "Start, stop or issue a barrier command for a series of async operations.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandContainerArgument", + "Name": "ARGS", + "Type": "OneOf", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "ON", + "DisplayText": "on", + "Type": "PureToken", + "Token": "ON" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "OFF", + "DisplayText": "off", + "Type": "PureToken", + "Token": "OFF" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "BARRIER", + "DisplayText": "barrier", + "Type": "PureToken", + "Token": "BARRIER" + } + ] + } + ] + }, + { + "Command": "MODULE", + "Name": "MODULE", + "Group": "Server", + "Summary": "A container for module commands.", + "Complexity": "Depends on subcommand.", + "SubCommands": [ + { + "Command": "MODULE_LOADCS", + "Name": "MODULE|LOADCS", + "Group": "Server", + "Summary": "Load a C# module in Garnet.", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "SRCPATH", + "Summary": "Source path to module definition", + "DisplayText": "srcPath", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "MODULEARG", + "Summary": "Module argument", + "DisplayText": "arg", + "Type": "String", + "ArgumentFlags": "Multiple" + } + ] + } + ] + } +] \ No newline at end of file diff --git a/playground/CommandInfoUpdater/GarnetCommandsInfo.json b/playground/CommandInfoUpdater/GarnetCommandsInfo.json index c0d89959be..8a4965cc87 100644 --- 
a/playground/CommandInfoUpdater/GarnetCommandsInfo.json +++ b/playground/CommandInfoUpdater/GarnetCommandsInfo.json @@ -13,7 +13,7 @@ "KeySpecifications": null, "SubCommands": [ { - "Command": "CLUSTER", + "Command": "CLUSTER_AOFSYNC", "Name": "CLUSTER|AOFSYNC", "IsInternal": true, "Arity": 3, @@ -26,7 +26,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_APPENDLOG", "Name": "CLUSTER|APPENDLOG", "IsInternal": true, "Arity": 6, @@ -39,7 +39,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_BANLIST", "Name": "CLUSTER|BANLIST", "IsInternal": true, "Arity": 1, @@ -52,7 +52,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_BEGIN_REPLICA_RECOVER", "Name": "CLUSTER|BEGIN_REPLICA_RECOVER", "IsInternal": true, "Arity": 8, @@ -65,7 +65,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_DELKEYSINSLOT", "Name": "CLUSTER|DELKEYSINSLOT", "IsInternal": true, "Arity": 2, @@ -78,7 +78,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_DELKEYSINSLOTRANGE", "Name": "CLUSTER|DELKEYSINSLOTRANGE", "IsInternal": true, "Arity": -3, @@ -91,7 +91,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_ENDPOINT", "Name": "CLUSTER|ENDPOINT", "IsInternal": true, "Arity": 2, @@ -104,7 +104,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_FAILSTOPWRITES", "Name": "CLUSTER|FAILSTOPWRITES", "IsInternal": true, "Arity": 2, @@ -117,7 +117,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_FAILREPLICATIONOFFSET", "Name": "CLUSTER|FAILREPLICATIONOFFSET", "IsInternal": true, "Arity": 2, @@ -130,7 +130,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_GOSSIP", "Name": "CLUSTER|GOSSIP", "IsInternal": true, "Arity": -2, @@ -143,7 +143,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_HELP", "Name": "CLUSTER|HELP", "IsInternal": false, "Arity": 1, @@ -156,7 
+156,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_INITIATE_REPLICA_SYNC", "Name": "CLUSTER|INITIATE_REPLICA_SYNC", "IsInternal": true, "Arity": 6, @@ -169,7 +169,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MIGRATE", "Name": "CLUSTER|MIGRATE", "IsInternal": true, "Arity": 4, @@ -182,7 +182,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MTASKS", "Name": "CLUSTER|MTASKS", "IsInternal": true, "Arity": 1, @@ -195,7 +195,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_MYPARENTID", "Name": "CLUSTER|MYPARENTID", "IsInternal": true, "Arity": 1, @@ -208,7 +208,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SEND_CKPT_FILE_SEGMENT", "Name": "CLUSTER|SEND_CKPT_FILE_SEGMENT", "IsInternal": true, "Arity": 6, @@ -221,7 +221,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SEND_CKPT_METADATA", "Name": "CLUSTER|SEND_CKPT_METADATA", "IsInternal": true, "Arity": 4, @@ -234,7 +234,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SETSLOTSRANGE", "Name": "CLUSTER|SETSLOTSRANGE", "IsInternal": true, "Arity": -4, @@ -247,7 +247,7 @@ "SubCommands": null }, { - "Command": "CLUSTER", + "Command": "CLUSTER_SLOTSTATE", "Name": "CLUSTER|SLOTSTATE", "IsInternal": true, "Arity": 1, @@ -370,7 +370,7 @@ "KeySpecifications": null, "SubCommands": [ { - "Command": "LATENCY", + "Command": "LATENCY_HELP", "Name": "LATENCY|HELP", "IsInternal": false, "Arity": 0, @@ -453,7 +453,7 @@ ], "SubCommands": [ { - "Command": "WATCH", + "Command": "WATCH_MS", "Name": "WATCH|MS", "IsInternal": false, "Arity": -3, @@ -467,7 +467,7 @@ "SubCommands": null }, { - "Command": "WATCH", + "Command": "WATCH_OS", "Name": "WATCH|OS", "IsInternal": false, "Arity": -3, @@ -511,7 +511,7 @@ "KeySpecifications": null, "SubCommands": [ { - "Command": "MODULE", + "Command": "MODULE_LOADCS", "Name": "MODULE|LOADCS", "IsInternal": 
false, "Arity": -3, diff --git a/playground/CommandInfoUpdater/Options.cs b/playground/CommandInfoUpdater/Options.cs index bb714b62b2..57f6e46e16 100644 --- a/playground/CommandInfoUpdater/Options.cs +++ b/playground/CommandInfoUpdater/Options.cs @@ -13,8 +13,8 @@ public class Options [Option('h', "host", Required = false, Default = "127.0.0.1", HelpText = "RESP server host to query")] public string RespServerHost { get; set; } - [Option('o', "output", Required = true, HelpText = "Output path for updated JSON file")] - public string OutputPath { get; set; } + [Option('o', "output", Required = false, HelpText = "Output directory for updated JSON files")] + public string OutputDir { get; set; } [Option('f', "force", Required = false, Default = false, HelpText = "Force overwrite existing commands info")] public bool Force { get; set; } diff --git a/playground/CommandInfoUpdater/Program.cs b/playground/CommandInfoUpdater/Program.cs index 4260a5f58c..6ab35ff1b9 100644 --- a/playground/CommandInfoUpdater/Program.cs +++ b/playground/CommandInfoUpdater/Program.cs @@ -57,8 +57,12 @@ static void Main(string[] args) return; } - CommandInfoUpdater.CommandInfoUpdater.TryUpdateCommandInfo(config.OutputPath, config.RespServerPort, - localRedisHost, config.IgnoreCommands, config.Force, logger); + if (!CommandInfoUpdater.CommandInfoUpdater.TryUpdateCommandInfo(config.OutputDir, config.RespServerPort, + localRedisHost, config.IgnoreCommands, config.Force, logger, out var updatedCommandsInfo)) + return; + + CommandDocsUpdater.TryUpdateCommandDocs(config.OutputDir, config.RespServerPort, + localRedisHost, config.IgnoreCommands, updatedCommandsInfo, config.Force, logger); } static void DisplayHelp(ParserResult result, IEnumerable errs) diff --git a/playground/CommandInfoUpdater/RespCommandDocsParser.cs b/playground/CommandInfoUpdater/RespCommandDocsParser.cs new file mode 100644 index 0000000000..391929819e --- /dev/null +++ b/playground/CommandInfoUpdater/RespCommandDocsParser.cs @@ 
-0,0 +1,230 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +using Garnet.common; +using Garnet.server; +using StackExchange.Redis; + +namespace CommandInfoUpdater +{ + public class RespCommandDocsParser + { + /// + /// Tries to parse RespCommandDocs from RESP format + /// + /// RedisResult returned by SE.Redis client + /// Index from which to start reading RedisResult + /// Mapping between command name and Garnet RespCommand and ArrayCommand values + /// Parsed RespCommandDocs object + /// + /// Name of parent command, null if none + /// True if parsing successful + public static bool TryReadFromResp(RedisResult result, int resultStartIdx, IReadOnlyDictionary supportedCommands, out RespCommandDocs cmdDocs, out string cmdName, string parentCommand = null) + { + cmdDocs = default; + cmdName = default; + string summary = null; + var group = RespCommandGroup.None; + string complexity = null; + var docFlags = RespCommandDocFlags.None; + string replacedBy = null; + RespCommandDocs[] subCommands = null; + RespCommandArgumentBase[] arguments = null; + + if (result.Length - resultStartIdx < 2) return false; + + if (result[resultStartIdx].Resp3Type != ResultType.BulkString) return false; + cmdName = result[resultStartIdx].ToString().ToUpper(); + + if (result[resultStartIdx + 1].Resp3Type != ResultType.Array) return false; + var elemCount = result[resultStartIdx + 1].Length; + + var elemArr = result[resultStartIdx + 1]; + for (var i = 0; i < elemCount; i += 2) + { + var elemKey = elemArr[i]; + if (elemKey.Resp3Type != ResultType.BulkString) return false; + var key = elemKey.ToString(); + + var elemVal = elemArr[i + 1]; + if (string.Equals(key, "summary")) + { + if (elemVal.Resp3Type != ResultType.BulkString) return false; + summary = elemVal.ToString(); + } + else if (string.Equals(key, "group")) + { + if (elemVal.Resp3Type != ResultType.BulkString || + !EnumUtils.TryParseEnumFromDescription(elemVal.ToString(), out group)) return false; + } 
+ else if (string.Equals(key, "complexity")) + { + if (elemVal.Resp3Type != ResultType.BulkString) return false; + complexity = elemVal.ToString(); + } + else if (string.Equals(key, "doc_flags")) + { + if (elemVal.Resp3Type != ResultType.Array) return false; + var flagsCount = elemVal.Length; + for (var j = 0; j < flagsCount; j++) + { + if (elemVal[j].Resp3Type != ResultType.SimpleString || + !EnumUtils.TryParseEnumFromDescription(elemVal[j].ToString(), + out var flag)) + continue; + docFlags |= flag; + } + } + else if (string.Equals(key, "replaced_by")) + { + if (elemVal.Resp3Type != ResultType.BulkString) return false; + replacedBy = elemVal.ToString(); + } + else if (string.Equals(key, "arguments")) + { + if (elemVal.Resp3Type != ResultType.Array) return false; + var argCount = elemVal.Length; + arguments = new RespCommandArgumentBase[argCount]; + for (var j = 0; j < argCount; j++) + { + if (!RespCommandArgumentParser.TryReadFromResp(elemVal[j], out var arg)) + return false; + arguments[j] = arg; + } + } + else if (string.Equals(key, "subcommands")) + { + if (elemVal.Resp3Type != ResultType.Array) return false; + var scCount = elemVal.Length / 2; + subCommands = new RespCommandDocs[scCount]; + for (var j = 0; j < scCount; j++) + { + if (!TryReadFromResp(elemVal, j * 2, supportedCommands, out var subCommand, out _, cmdName)) + return false; + subCommands[j] = subCommand; + } + } + } + + cmdDocs = new RespCommandDocs(supportedCommands.GetValueOrDefault(cmdName, RespCommand.NONE), cmdName, summary, group, complexity, + docFlags, replacedBy, arguments, subCommands); + + return true; + } + } + + internal class RespCommandArgumentParser + { + /// + /// Tries to parse RespCommandArgumentBase from RESP format + /// + /// + /// Parsed RespCommandArgumentBase object + /// True if parsing successful + internal static bool TryReadFromResp(RedisResult result, out RespCommandArgumentBase cmdArg) + { + cmdArg = default; + string name = null; + string displayText = null; + var 
argType = RespCommandArgumentType.None; + var keySpecIdx = -1; + string token = null; + string summary = null; + var flags = RespCommandArgumentFlags.None; + string strVal = null; + RespCommandArgumentBase[] nestedArgsVal = default; + + if (result.Resp3Type != ResultType.Array) return false; + var elemCount = result.Length; + + for (var i = 0; i < elemCount; i += 2) + { + var elemKey = result[i]; + if (elemKey.Resp3Type != ResultType.BulkString) return false; + var key = elemKey.ToString(); + + var elemVal = result[i + 1]; + + if (string.Equals(key, "name")) + { + if (elemVal.Resp3Type != ResultType.BulkString) return false; + name = elemVal.ToString().ToUpper(); + } + else if (string.Equals(key, "display_text")) + { + if (elemVal.Resp3Type != ResultType.BulkString) return false; + displayText = elemVal.ToString(); + } + else if (string.Equals(key, "type")) + { + if (elemVal.Resp3Type != ResultType.BulkString || + !EnumUtils.TryParseEnumFromDescription(elemVal.ToString(), out argType)) + return false; + } + else if (string.Equals(key, "key_spec_index")) + { + if (elemVal.Resp3Type != ResultType.Integer || !int.TryParse(elemVal.ToString(), out keySpecIdx)) + return false; + } + else if (string.Equals(key, "token")) + { + if (elemVal.Resp3Type != ResultType.BulkString) return false; + token = elemVal.ToString(); + } + else if (string.Equals(key, "summary")) + { + if (elemVal.Resp3Type != ResultType.BulkString) return false; + summary = elemVal.ToString(); + } + else if (string.Equals(key, "flags")) + { + if (elemVal.Resp3Type != ResultType.Array) return false; + var flagsCount = elemVal.Length; + for (var j = 0; j < flagsCount; j++) + { + if (elemVal[j].Resp3Type != ResultType.SimpleString || + !EnumUtils.TryParseEnumFromDescription(elemVal[j].ToString(), + out var flag)) + continue; + flags |= flag; + } + } + else if (string.Equals(key, "value")) + { + if (argType == RespCommandArgumentType.None) + return false; + if (elemVal.Resp3Type != ResultType.BulkString) 
return false; + strVal = elemVal.ToString(); + } + else if (string.Equals(key, "arguments")) + { + if (argType != RespCommandArgumentType.OneOf && argType != RespCommandArgumentType.Block) + return false; + + if (elemVal.Resp3Type != ResultType.Array) return false; + var argCount = elemVal.Length; + nestedArgsVal = new RespCommandArgumentBase[argCount]; + for (var j = 0; j < argCount; j++) + { + if (!TryReadFromResp(elemVal[j], out var arg)) + return false; + nestedArgsVal[j] = arg; + } + } + } + + if (name == string.Empty || argType == RespCommandArgumentType.None || + (argType == RespCommandArgumentType.Key && keySpecIdx == -1)) return false; + + cmdArg = argType switch + { + RespCommandArgumentType.Key => new RespCommandKeyArgument(name, displayText, token, summary, flags, strVal, keySpecIdx), + RespCommandArgumentType.OneOf or RespCommandArgumentType.Block => new RespCommandContainerArgument(name, + displayText, argType, token, summary, flags, nestedArgsVal), + _ => new RespCommandBasicArgument(name, displayText, argType, token, summary, flags, strVal) + }; + + return true; + } + } +} \ No newline at end of file diff --git a/libs/server/Resp/RespCommandInfoParser.cs b/playground/CommandInfoUpdater/RespCommandInfoParser.cs similarity index 99% rename from libs/server/Resp/RespCommandInfoParser.cs rename to playground/CommandInfoUpdater/RespCommandInfoParser.cs index b79b259819..a1c16d72ed 100644 --- a/libs/server/Resp/RespCommandInfoParser.cs +++ b/playground/CommandInfoUpdater/RespCommandInfoParser.cs @@ -27,7 +27,11 @@ public static unsafe bool TryReadFromResp(ref byte* ptr, byte* end, IReadOnlyDic commandInfo = default; // Command info is null - if (new ReadOnlySpan(ptr, 5).SequenceEqual("$-1\r\n"u8)) return true; + if (new ReadOnlySpan(ptr, 5).SequenceEqual("$-1\r\n"u8)) + { + ptr += 5; + return true; + } // Verify command info array length if (!RespReadUtils.ReadUnsignedArrayLength(out var infoElemCount, ref ptr, end) @@ -99,8 +103,9 @@ public static unsafe 
bool TryReadFromResp(ref byte* ptr, byte* end, IReadOnlyDic commandInfo = new RespCommandsInfo() { - Command = supportedCommands[parentCommand ?? name], + Command = supportedCommands.GetValueOrDefault(name, RespCommand.NONE), Name = name.ToUpper(), + IsInternal = false, Arity = arity, Flags = flags, FirstKey = firstKey, diff --git a/playground/CommandInfoUpdater/SupportedCommand.cs b/playground/CommandInfoUpdater/SupportedCommand.cs index af40277e07..d8cbb9432f 100644 --- a/playground/CommandInfoUpdater/SupportedCommand.cs +++ b/playground/CommandInfoUpdater/SupportedCommand.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +using System.Collections.ObjectModel; using Garnet.server; namespace CommandInfoUpdater @@ -13,14 +14,14 @@ public class SupportedCommand private static readonly SupportedCommand[] AllSupportedCommands = [ new("ACL", RespCommand.ACL, [ - "ACL|CAT", - "ACL|DELUSER", - "ACL|LIST", - "ACL|LOAD", - "ACL|SAVE", - "ACL|SETUSER", - "ACL|USERS", - "ACL|WHOAMI", + new("ACL|CAT", RespCommand.ACL_CAT), + new("ACL|DELUSER", RespCommand.ACL_DELUSER), + new("ACL|LIST", RespCommand.ACL_LIST), + new("ACL|LOAD", RespCommand.ACL_LOAD), + new("ACL|SAVE", RespCommand.ACL_SAVE), + new("ACL|SETUSER", RespCommand.ACL_SETUSER), + new("ACL|USERS", RespCommand.ACL_USERS), + new("ACL|WHOAMI", RespCommand.ACL_WHOAMI), ]), new("APPEND", RespCommand.APPEND), new("ASKING", RespCommand.ASKING), @@ -35,61 +36,68 @@ public class SupportedCommand new("BLPOP", RespCommand.BLPOP), new("BRPOP", RespCommand.BRPOP), new("BLMOVE", RespCommand.BLMOVE), - new("CLIENT", RespCommand.CLIENT), + new("CLIENT", RespCommand.CLIENT, + [ + new("CLIENT|ID", RespCommand.CLIENT_ID), + new("CLIENT|INFO", RespCommand.CLIENT_INFO), + new("CLIENT|LIST", RespCommand.CLIENT_LIST), + new("CLIENT|KILL", RespCommand.CLIENT_KILL), + ]), new("CLUSTER", RespCommand.CLUSTER, [ - "CLUSTER|ADDSLOTS", - "CLUSTER|ADDSLOTSRANGE", - "CLUSTER|AOFSYNC", - "CLUSTER|APPENDLOG", 
- "CLUSTER|BANLIST", - "CLUSTER|BEGIN_REPLICA_RECOVER", - "CLUSTER|BUMPEPOCH", - "CLUSTER|COUNTKEYSINSLOT", - "CLUSTER|DELKEYSINSLOT", - "CLUSTER|DELKEYSINSLOTRANGE", - "CLUSTER|DELSLOTS", - "CLUSTER|DELSLOTSRANGE", - "CLUSTER|ENDPOINT", - "CLUSTER|FAILOVER", - "CLUSTER|FAILREPLICATIONOFFSET", - "CLUSTER|FAILSTOPWRITES", - "CLUSTER|FORGET", - "CLUSTER|GETKEYSINSLOT", - "CLUSTER|GOSSIP", - "CLUSTER|HELP", - "CLUSTER|INFO", - "CLUSTER|INITIATE_REPLICA_SYNC", - "CLUSTER|KEYSLOT", - "CLUSTER|MEET", - "CLUSTER|MIGRATE", - "CLUSTER|MTASKS", - "CLUSTER|MYID", - "CLUSTER|MYPARENTID", - "CLUSTER|NODES", - "CLUSTER|REPLICAS", - "CLUSTER|REPLICATE", - "CLUSTER|RESET", - "CLUSTER|SEND_CKPT_FILE_SEGMENT", - "CLUSTER|SEND_CKPT_METADATA", - "CLUSTER|SET-CONFIG-EPOCH", - "CLUSTER|SETSLOT", - "CLUSTER|SETSLOTSRANGE", - "CLUSTER|SHARDS", - "CLUSTER|SLOTS", - "CLUSTER|SLOTSTATE", + new("CLUSTER|ADDSLOTS", RespCommand.CLUSTER_ADDSLOTS), + new("CLUSTER|ADDSLOTSRANGE", RespCommand.CLUSTER_ADDSLOTSRANGE), + new("CLUSTER|AOFSYNC", RespCommand.CLUSTER_AOFSYNC), + new("CLUSTER|APPENDLOG", RespCommand.CLUSTER_APPENDLOG), + new("CLUSTER|BANLIST", RespCommand.CLUSTER_BANLIST), + new("CLUSTER|BEGIN_REPLICA_RECOVER", RespCommand.CLUSTER_BEGIN_REPLICA_RECOVER), + new("CLUSTER|BUMPEPOCH", RespCommand.CLUSTER_BUMPEPOCH), + new("CLUSTER|COUNTKEYSINSLOT", RespCommand.CLUSTER_COUNTKEYSINSLOT), + new("CLUSTER|DELKEYSINSLOT", RespCommand.CLUSTER_DELKEYSINSLOT), + new("CLUSTER|DELKEYSINSLOTRANGE", RespCommand.CLUSTER_DELKEYSINSLOTRANGE), + new("CLUSTER|DELSLOTS", RespCommand.CLUSTER_DELSLOTS), + new("CLUSTER|DELSLOTSRANGE", RespCommand.CLUSTER_DELSLOTSRANGE), + new("CLUSTER|ENDPOINT", RespCommand.CLUSTER_ENDPOINT), + new("CLUSTER|FAILOVER", RespCommand.CLUSTER_FAILOVER), + new("CLUSTER|FAILREPLICATIONOFFSET", RespCommand.CLUSTER_FAILREPLICATIONOFFSET), + new("CLUSTER|FAILSTOPWRITES", RespCommand.CLUSTER_FAILSTOPWRITES), + new("CLUSTER|FORGET", RespCommand.CLUSTER_FORGET), + new("CLUSTER|GETKEYSINSLOT", 
RespCommand.CLUSTER_GETKEYSINSLOT), + new("CLUSTER|GOSSIP", RespCommand.CLUSTER_GOSSIP), + new("CLUSTER|HELP", RespCommand.CLUSTER_HELP), + new("CLUSTER|INFO", RespCommand.CLUSTER_INFO), + new("CLUSTER|INITIATE_REPLICA_SYNC", RespCommand.CLUSTER_INITIATE_REPLICA_SYNC), + new("CLUSTER|KEYSLOT", RespCommand.CLUSTER_KEYSLOT), + new("CLUSTER|MEET", RespCommand.CLUSTER_MEET), + new("CLUSTER|MIGRATE", RespCommand.CLUSTER_MIGRATE), + new("CLUSTER|MTASKS", RespCommand.CLUSTER_MTASKS), + new("CLUSTER|MYID", RespCommand.CLUSTER_MYID), + new("CLUSTER|MYPARENTID", RespCommand.CLUSTER_MYPARENTID), + new("CLUSTER|NODES", RespCommand.CLUSTER_NODES), + new("CLUSTER|REPLICAS", RespCommand.CLUSTER_REPLICAS), + new("CLUSTER|REPLICATE", RespCommand.CLUSTER_REPLICATE), + new("CLUSTER|RESET", RespCommand.CLUSTER_RESET), + new("CLUSTER|SEND_CKPT_FILE_SEGMENT", RespCommand.CLUSTER_SEND_CKPT_FILE_SEGMENT), + new("CLUSTER|SEND_CKPT_METADATA", RespCommand.CLUSTER_SEND_CKPT_METADATA), + new("CLUSTER|SET-CONFIG-EPOCH", RespCommand.CLUSTER_SETCONFIGEPOCH), + new("CLUSTER|SETSLOT", RespCommand.CLUSTER_SETSLOT), + new("CLUSTER|SETSLOTSRANGE", RespCommand.CLUSTER_SETSLOTSRANGE), + new("CLUSTER|SHARDS", RespCommand.CLUSTER_SHARDS), + new("CLUSTER|SLOTS", RespCommand.CLUSTER_SLOTS), + new("CLUSTER|SLOTSTATE", RespCommand.CLUSTER_SLOTSTATE), ]), new("COMMAND", RespCommand.COMMAND, [ - "COMMAND|INFO", - "COMMAND|COUNT", + new("COMMAND|INFO", RespCommand.COMMAND_INFO), + new("COMMAND|COUNT", RespCommand.COMMAND_COUNT), + new("COMMAND|DOCS", RespCommand.COMMAND_DOCS), ]), new("COMMITAOF", RespCommand.COMMITAOF), new("CONFIG", RespCommand.CONFIG, [ - "CONFIG|GET", - "CONFIG|SET", - "CONFIG|REWRITE" + new("CONFIG|GET", RespCommand.CONFIG_GET), + new("CONFIG|SET", RespCommand.CONFIG_SET), + new("CONFIG|REWRITE", RespCommand.CONFIG_REWRITE), ]), new("COSCAN", RespCommand.COSCAN), new("CustomRawStringCmd", RespCommand.CustomRawStringCmd), @@ -105,6 +113,8 @@ public class SupportedCommand new("EXEC", 
RespCommand.EXEC), new("EXISTS", RespCommand.EXISTS), new("EXPIRE", RespCommand.EXPIRE), + new("EXPIREAT", RespCommand.EXPIREAT), + new("EXPIRETIME", RespCommand.EXPIRETIME), new("FAILOVER", RespCommand.FAILOVER), new("FLUSHALL", RespCommand.FLUSHALL), new("FLUSHDB", RespCommand.FLUSHDB), @@ -142,9 +152,9 @@ public class SupportedCommand new("LASTSAVE", RespCommand.LASTSAVE), new("LATENCY", RespCommand.LATENCY, [ - "LATENCY|HELP", - "LATENCY|HISTOGRAM", - "LATENCY|RESET" + new("LATENCY|HELP", RespCommand.LATENCY_HELP), + new("LATENCY|HISTOGRAM", RespCommand.LATENCY_HISTOGRAM), + new("LATENCY|RESET", RespCommand.LATENCY_RESET), ]), new("LINDEX", RespCommand.LINDEX), new("LINSERT", RespCommand.LINSERT), @@ -152,6 +162,7 @@ public class SupportedCommand new("LMOVE", RespCommand.LMOVE), new("LMPOP", RespCommand.LMPOP), new("LPOP", RespCommand.LPOP), + new("LPOS", RespCommand.LPOS), new("LPUSH", RespCommand.LPUSH), new("LPUSHX", RespCommand.LPUSHX), new("LRANGE", RespCommand.LRANGE), @@ -160,13 +171,14 @@ public class SupportedCommand new("LTRIM", RespCommand.LTRIM), new("MEMORY", RespCommand.MEMORY, [ - "MEMORY|USAGE" + new("MEMORY|USAGE", RespCommand.MEMORY_USAGE), ]), new("MGET", RespCommand.MGET), new("MIGRATE", RespCommand.MIGRATE), + new("PURGEBP", RespCommand.PURGEBP), new("MODULE", RespCommand.MODULE, [ - "MODULE|LOADCS", + new("MODULE|LOADCS", RespCommand.MODULE_LOADCS), ]), new("MONITOR", RespCommand.MONITOR), new("MSET", RespCommand.MSET), @@ -174,6 +186,8 @@ public class SupportedCommand new("MULTI", RespCommand.MULTI), new("PERSIST", RespCommand.PERSIST), new("PEXPIRE", RespCommand.PEXPIRE), + new("PEXPIREAT", RespCommand.PEXPIREAT), + new("PEXPIRETIME", RespCommand.PEXPIRETIME), new("PFADD", RespCommand.PFADD), new("PFCOUNT", RespCommand.PFCOUNT), new("PFMERGE", RespCommand.PFMERGE), @@ -188,6 +202,7 @@ public class SupportedCommand new("READONLY", RespCommand.READONLY), new("READWRITE", RespCommand.READWRITE), new("RENAME", RespCommand.RENAME), + 
new("RENAMENX", RespCommand.RENAMENX), new("REPLICAOF", RespCommand.REPLICAOF), new("RPOP", RespCommand.RPOP), new("RPOPLPUSH", RespCommand.RPOPLPUSH), @@ -228,8 +243,8 @@ public class SupportedCommand new("UNWATCH", RespCommand.UNWATCH), new("WATCH", RespCommand.WATCH, [ - "WATCH|MS", - "WATCH|OS", + new("WATCH|MS", RespCommand.WATCH_MS), + new("WATCH|OS", RespCommand.WATCH_OS), ]), new("ZADD", RespCommand.ZADD), new("ZCARD", RespCommand.ZCARD), @@ -258,10 +273,34 @@ public class SupportedCommand new("SCRIPT", RespCommand.SCRIPT), ]; - private static readonly Lazy> LazySupportedCommandsMap = + static readonly Lazy> LazySupportedCommandsMap = new(() => { - return AllSupportedCommands.ToDictionary(sc => sc.Command, sc => sc); + var map = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var supportedCommand in AllSupportedCommands) + { + map.Add(supportedCommand.Command, supportedCommand); + } + + return new ReadOnlyDictionary(map); + }); + + static readonly Lazy> LazySupportedCommandsFlattenedMap = + new(() => + { + var map = new Dictionary(SupportedCommandsMap, StringComparer.OrdinalIgnoreCase); + foreach (var supportedCommand in SupportedCommandsMap.Values) + { + if (supportedCommand.SubCommands != null) + { + foreach (var subCommand in supportedCommand.SubCommands) + { + map.Add(subCommand.Key, subCommand.Value); + } + } + } + + return new ReadOnlyDictionary(map); }); /// @@ -269,6 +308,11 @@ public class SupportedCommand /// public static IReadOnlyDictionary SupportedCommandsMap => LazySupportedCommandsMap.Value; + /// + /// Map between a supported command's and supported sub-command's name and its SupportedCommand object + /// + public static IReadOnlyDictionary SupportedCommandsFlattenedMap => LazySupportedCommandsFlattenedMap.Value; + /// /// Supported command's name /// @@ -277,7 +321,7 @@ public class SupportedCommand /// /// Supported command's sub-commands' names /// - public HashSet SubCommands { get; set; } + public IReadOnlyDictionary 
SubCommands { get; set; } /// /// Garnet RespCommand @@ -298,10 +342,10 @@ public SupportedCommand() /// Supported command name /// RESP Command enum /// List of supported sub-command names (optional) - public SupportedCommand(string command, RespCommand respCommand = RespCommand.NONE, IEnumerable subCommands = null) : this() + public SupportedCommand(string command, RespCommand respCommand = RespCommand.NONE, IEnumerable subCommands = null) : this() { Command = command; - SubCommands = subCommands == null ? null : new HashSet(subCommands); + SubCommands = subCommands?.ToDictionary(sc => sc.Command, sc => sc); RespCommand = respCommand; } } diff --git a/playground/Embedded.perftest/DummyNetworkSender.cs b/playground/Embedded.perftest/DummyNetworkSender.cs index 4024e318f4..59224556bf 100644 --- a/playground/Embedded.perftest/DummyNetworkSender.cs +++ b/playground/Embedded.perftest/DummyNetworkSender.cs @@ -1,7 +1,9 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
+using System.Net.Sockets; using System.Runtime.CompilerServices; +using System.Threading; using Garnet.networking; namespace Embedded.perftest @@ -46,6 +48,8 @@ public DummyNetworkSender() public string RemoteEndpointName => ""; + public string LocalEndpointName => ""; + /// /// Cleanup this DummyNetworkSender instance /// @@ -118,5 +122,11 @@ public void SendResponse(byte[] buffer, int offset, int count, object context) public void Throttle() { } + + /// + public bool TryClose() + { + return false; + } } } \ No newline at end of file diff --git a/playground/Embedded.perftest/EmbeddedRespServer.cs b/playground/Embedded.perftest/EmbeddedRespServer.cs index b466bcb0a7..9a9f41aa49 100644 --- a/playground/Embedded.perftest/EmbeddedRespServer.cs +++ b/playground/Embedded.perftest/EmbeddedRespServer.cs @@ -36,7 +36,7 @@ public EmbeddedRespServer(GarnetServerOptions opts, ILoggerFactory loggerFactory /// A new RESP server session internal RespServerSession GetRespSession() { - return new RespServerSession(new DummyNetworkSender(), storeWrapper, null, null, null, false); + return new RespServerSession(0, new DummyNetworkSender(), storeWrapper, null, null, false); } } } \ No newline at end of file diff --git a/playground/GarnetJSON/Module.cs b/playground/GarnetJSON/Module.cs index 0e2954d70f..fc791a04ed 100644 --- a/playground/GarnetJSON/Module.cs +++ b/playground/GarnetJSON/Module.cs @@ -2,7 +2,6 @@ // Licensed under the MIT license. using Garnet.server; -using Garnet.server.Module; using Microsoft.Extensions.Logging; namespace GarnetJSON diff --git a/playground/MigrateBench/MigrateRequest.cs b/playground/MigrateBench/MigrateRequest.cs index ff7aee9220..162fb6ff02 100644 --- a/playground/MigrateBench/MigrateRequest.cs +++ b/playground/MigrateBench/MigrateRequest.cs @@ -2,7 +2,9 @@ // Licensed under the MIT license. 
using System.Diagnostics; using System.Net; +using System.Runtime.CompilerServices; using Garnet.client; +using Garnet.common; using Microsoft.Extensions.Logging; namespace MigrateBench @@ -34,6 +36,17 @@ public class MigrateRequest IPEndPoint sourceNodeEndpoint; IPEndPoint targetNodeEndpoint; + public static T MeasureElapsed(long startTimestamp, TaskAwaiter awaiter, string msg, ref long totalElapsed, bool verbose = false, ILogger logger = null) + { + var result = awaiter.GetResult(); + var elapsed = Stopwatch.GetTimestamp() - startTimestamp; + totalElapsed += elapsed; + var t = TimeSpan.FromTicks(elapsed); + if (verbose) + logger?.LogInformation("[{msg}] ElapsedTime = {elapsed} seconds", msg, t.TotalSeconds); + return result; + } + public MigrateRequest(Options opts, ILogger logger = null) { this.opts = opts; @@ -44,8 +57,8 @@ public MigrateRequest(Options opts, ILogger logger = null) targetAddress = targetEndpoint[0]; targetPort = int.Parse(targetEndpoint[1]); - sourceNode = new(sourceAddress, sourcePort, bufferSize: 1 << 22); - targetNode = new(targetAddress, targetPort, bufferSize: 1 << 22); + sourceNode = new(sourceAddress, sourcePort, new NetworkBufferSettings(1 << 22), logger: logger); + targetNode = new(targetAddress, targetPort, new NetworkBufferSettings(1 << 22), logger: logger); this.timeout = (int)TimeSpan.FromSeconds(opts.Timeout).TotalMilliseconds; this.logger = logger; } @@ -57,16 +70,17 @@ public void Run() sourceNode.Connect(); targetNode.Connect(); - sourceNodeId = sourceNode.ExecuteAsync(["CLUSTER", "MYID"]).GetAwaiter().GetResult(); - targetNodeId = targetNode.ExecuteAsync(["CLUSTER", "MYID"]).GetAwaiter().GetResult(); + long myIdElapsed = 0; + sourceNodeId = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync(["CLUSTER", "MYID"]).GetAwaiter(), "CLUSTER_MYID", ref myIdElapsed, opts.Verbose, logger); + targetNodeId = MeasureElapsed(Stopwatch.GetTimestamp(), targetNode.ExecuteAsync(["CLUSTER", "MYID"]).GetAwaiter(), "CLUSTER_MYID", 
ref myIdElapsed, opts.Verbose, logger); - var endpoint = sourceNode.ExecuteAsync(["CLUSTER", "ENDPOINT", sourceNodeId]).GetAwaiter().GetResult(); + var endpoint = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync(["CLUSTER", "ENDPOINT", sourceNodeId]).GetAwaiter(), "CLUSTER_ENDPOINT", ref myIdElapsed, opts.Verbose, logger); if (!IPEndPoint.TryParse(endpoint, out sourceNodeEndpoint)) { logger?.LogError("ERR Source Endpoint ({endpoint}) is not valid!", endpoint); return; } - endpoint = targetNode.ExecuteAsync(["CLUSTER", "ENDPOINT", targetNodeId]).GetAwaiter().GetResult(); + endpoint = MeasureElapsed(Stopwatch.GetTimestamp(), targetNode.ExecuteAsync(["CLUSTER", "ENDPOINT", targetNodeId]).GetAwaiter(), "CLUSTER_ENDPOINT", ref myIdElapsed, opts.Verbose, logger); if (!IPEndPoint.TryParse(endpoint, out targetNodeEndpoint)) { logger?.LogError("ERR Target Endpoint ({endpoint}) is not valid!", endpoint); @@ -103,7 +117,7 @@ public void Run() int dbsize(ref GarnetClientSession c) { if (!opts.Dbsize) return 0; - var resp = c.ExecuteAsync("dbsize").GetAwaiter().GetResult(); + var resp = MeasureElapsed(Stopwatch.GetTimestamp(), c.ExecuteAsync("dbsize").GetAwaiter(), "DBSIZE", ref myIdElapsed, opts.Verbose, logger); return int.Parse(resp); } } @@ -145,7 +159,7 @@ private void MigrateSlotRanges() var elapsed = Stopwatch.GetTimestamp() - startTimestamp; var t = TimeSpan.FromTicks(elapsed); - logger?.LogInformation("SlotsRange Elapsed Time: {elapsed} seconds", t.TotalSeconds); + logger?.LogInformation("SLOTSRANGE Elapsed Time: {elapsed} seconds", t.TotalSeconds); } catch (Exception ex) { @@ -192,7 +206,7 @@ private void MigrateSlots() var elapsed = Stopwatch.GetTimestamp() - startTimestamp; var t = TimeSpan.FromTicks(elapsed); - logger?.LogInformation("SlotsRange Elapsed Time: {elapsed} seconds", t.TotalSeconds); + logger?.LogInformation("SLOTS Elapsed Time: {elapsed} seconds", t.TotalSeconds); } catch (Exception ex) { @@ -220,39 +234,58 @@ private void MigrateKeys() 
_slots.Add(j); } - var sourceNodeId = sourceNode.ExecuteAsync("cluster", "myid").GetAwaiter().GetResult(); - var targetNodeId = targetNode.ExecuteAsync("cluster", "myid").GetAwaiter().GetResult(); + long myIdElapsed = 0; + long setSlotElapsed = 0; + long countKeysElapsed = 0; + long getKeysElapsed = 0; + long migrateKeysElapsed = 0; + var sourceNodeId = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync("cluster", "myid").GetAwaiter(), "CLUSTER_MYID", ref myIdElapsed, opts.Verbose, logger); + var targetNodeId = MeasureElapsed(Stopwatch.GetTimestamp(), targetNode.ExecuteAsync("cluster", "myid").GetAwaiter(), "CLUSTER_MYID", ref myIdElapsed, opts.Verbose, logger); + string[] stable = ["cluster", "setslot", "", "stable"]; string[] migrating = ["cluster", "setslot", "", "migrating", targetNodeId]; string[] importing = ["cluster", "setslot", "", "importing", sourceNodeId]; string[] node = ["cluster", "setslot", "", "node", targetNodeId]; string[] countkeysinslot = ["cluster", "countkeysinslot", ""]; - string[] getkeysinslot = ["cluster", "getkeysinslot", ""]; + string[] getkeysinslot = ["cluster", "getkeysinslot", "", ""]; + var startTimestamp = Stopwatch.GetTimestamp(); foreach (var slot in _slots) { var slotStr = slot.ToString(); + stable[2] = slotStr; migrating[2] = slotStr; importing[2] = slotStr; node[2] = slotStr; countkeysinslot[2] = slotStr; getkeysinslot[2] = slotStr; - var resp = sourceNode.ExecuteAsync(migrating).GetAwaiter().GetResult(); - resp = targetNode.ExecuteAsync(importing).GetAwaiter().GetResult(); - resp = sourceNode.ExecuteAsync(countkeysinslot).GetAwaiter().GetResult(); - var keys = sourceNode.ExecuteForArrayAsync(getkeysinslot).GetAwaiter().GetResult(); + var resp = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync(stable).GetAwaiter(), "SETSLOT_STABLE", ref setSlotElapsed, opts.Verbose, logger); + resp = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync(migrating).GetAwaiter(), "SETSLOT_MIGRATING", ref 
setSlotElapsed, opts.Verbose, logger); + + resp = MeasureElapsed(Stopwatch.GetTimestamp(), targetNode.ExecuteAsync(stable).GetAwaiter(), "SETSLOT_STABLE", ref setSlotElapsed, opts.Verbose, logger); + resp = MeasureElapsed(Stopwatch.GetTimestamp(), targetNode.ExecuteAsync(importing).GetAwaiter(), "SETSLOT_IMPORTING", ref setSlotElapsed, opts.Verbose, logger); + + getkeysinslot[3] = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync(countkeysinslot).GetAwaiter(), "COUNTKEYSINSLOT", ref countKeysElapsed, opts.Verbose, logger); + var keys = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteForArrayAsync(getkeysinslot).GetAwaiter(), "GETKEYSINSLOT", ref getKeysElapsed, opts.Verbose, logger); ICollection migrate = ["MIGRATE", targetNodeEndpoint.Address.ToString(), targetNodeEndpoint.Port.ToString(), "", "0", timeout.ToString(), "REPLACE", "KEYS"]; foreach (var key in keys) migrate.Add(key); var request = migrate.ToArray(); - logger?.LogInformation("KeyCount: {keys}", keys.Length); - resp = sourceNode.ExecuteAsync(request).GetAwaiter().GetResult(); + resp = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync(request).GetAwaiter(), $"MIGRATE ({slot}, {keys.Length})", ref migrateKeysElapsed, opts.Verbose, logger); - resp = targetNode.ExecuteAsync(node).GetAwaiter().GetResult(); - resp = sourceNode.ExecuteAsync(node).GetAwaiter().GetResult(); + resp = MeasureElapsed(Stopwatch.GetTimestamp(), targetNode.ExecuteAsync(node).GetAwaiter(), "NODE_TARGET", ref setSlotElapsed, opts.Verbose, logger); + resp = MeasureElapsed(Stopwatch.GetTimestamp(), sourceNode.ExecuteAsync(node).GetAwaiter(), "NODE_SOURCE", ref setSlotElapsed, opts.Verbose, logger); } + + var elapsed = Stopwatch.GetTimestamp() - startTimestamp; + var t = TimeSpan.FromTicks(elapsed); + logger?.LogInformation("KEYS Elapsed Time: {elapsed} seconds", t.TotalSeconds); + logger?.LogInformation("SetSlot Elapsed Time: {elapsed} seconds", TimeSpan.FromTicks(setSlotElapsed).TotalSeconds); + 
logger?.LogInformation("CountKeys Elapsed Time: {elapsed} seconds", TimeSpan.FromTicks(countKeysElapsed).TotalSeconds); + logger?.LogInformation("GetKeys Elapsed Time: {elapsed} seconds", TimeSpan.FromTicks(getKeysElapsed).TotalSeconds); + logger?.LogInformation("MigrateKeys Elapsed Time: {elapsed} seconds", TimeSpan.FromTicks(migrateKeysElapsed).TotalSeconds); } catch (Exception ex) { diff --git a/playground/MigrateBench/Options.cs b/playground/MigrateBench/Options.cs index 858b1926a5..c686206e40 100644 --- a/playground/MigrateBench/Options.cs +++ b/playground/MigrateBench/Options.cs @@ -28,5 +28,8 @@ public partial class Options [Option("timeout", Required = false, Default = 10, HelpText = "Migrate timeout in seconds")] public int Timeout { get; set; } + + [Option("verbose", Required = false, Default = false, HelpText = "Print detailed information")] + public bool Verbose { get; set; } } } \ No newline at end of file diff --git a/playground/SampleModule/SampleModule.cs b/playground/SampleModule/SampleModule.cs index 4bf1c535f0..7463d25933 100644 --- a/playground/SampleModule/SampleModule.cs +++ b/playground/SampleModule/SampleModule.cs @@ -3,7 +3,6 @@ using Garnet; using Garnet.server; -using Garnet.server.Module; using Microsoft.Extensions.Logging; namespace SampleModule diff --git a/test/Garnet.test.cluster/ClusterManagementTests.cs b/test/Garnet.test.cluster/ClusterManagementTests.cs index 2a5f6b3fed..dcaff8956b 100644 --- a/test/Garnet.test.cluster/ClusterManagementTests.cs +++ b/test/Garnet.test.cluster/ClusterManagementTests.cs @@ -6,6 +6,7 @@ using System.Linq; using System.Net; using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.Logging; using NUnit.Framework; using NUnit.Framework.Legacy; @@ -145,20 +146,6 @@ public void ClusterResetTest() for (var i = 1; i < node_count; i++) _ = context.clusterTestUtils.ClusterForget(i, nodeIds[0], 10, context.logger); - try - { - // Add data to server - var resp = 
context.clusterTestUtils.GetServer(0).Execute("SET", "wxz", "1234"); - ClassicAssert.AreEqual("OK", (string)resp); - - resp = context.clusterTestUtils.GetServer(0).Execute("GET", "wxz"); - ClassicAssert.AreEqual("1234", (string)resp); - } - catch (Exception ex) - { - context.logger?.LogError(ex, "An error occured at ClusterResetTest"); - } - // Hard reset node state. clean db data and cluster config _ = context.clusterTestUtils.ClusterReset(0, soft: false, 10, context.logger); config = context.clusterTestUtils.ClusterNodes(0, context.logger); @@ -172,29 +159,167 @@ public void ClusterResetTest() //Add slotRange for clean node context.clusterTestUtils.AddSlotsRange(0, slotRanges, context.logger); - try + + // Add node back to the cluster + context.clusterTestUtils.SetConfigEpoch(0, 1, context.logger); + context.clusterTestUtils.Meet(0, 1, context.logger); + context.clusterTestUtils.WaitUntilNodeIsKnownByAllNodes(0, context.logger); + for (int i = 0; i < node_count; i++) { - // Check DB was flushed due to hard reset - var resp = context.clusterTestUtils.GetServer(0).Execute("GET", "wxz"); - ClassicAssert.IsTrue(resp.IsNull, "DB not flushed after HARD reset"); + Console.WriteLine(i); + context.clusterTestUtils.WaitUntilNodeIsKnownByAllNodes(i, context.logger); + } + } + + [Test, Order(4)] + public void ClusterResetFailsForMasterWithKeysInSlotsTest() + { + var node_count = 4; + context.CreateInstances(node_count); + context.CreateConnection(); + var (_, _) = context.clusterTestUtils.SimpleSetupCluster(node_count, 0, logger: context.logger); + + var expectedSlotRange = new SlotRange(0, 4095); + var config = context.clusterTestUtils.ClusterNodes(0, context.logger); + var node = config.Nodes.First(); + ClassicAssert.AreEqual(expectedSlotRange, node.Slots[0]); + byte[] key = new byte[16]; + context.clusterTestUtils.RandomBytesRestrictedToSlot(ref key, node.Slots.First().From); + + context.clusterTestUtils.GetServer(0).Execute("SET", key, "1234"); + string res = 
context.clusterTestUtils.GetServer(0).Execute("GET", key).ToString(); + ClassicAssert.AreEqual("1234", res); + + VerifyClusterResetFails(true); + VerifyClusterResetFails(false); + + // soft reset node state. clean db data and cluster config + var nodeIds = context.clusterTestUtils.GetNodeIds(logger: context.logger); + ClassicAssert.AreEqual(4, config.Nodes.Count); + ClassicAssert.AreEqual(nodeIds[0], node.NodeId); + ClassicAssert.AreEqual(expectedSlotRange, node.Slots[0]); + ClassicAssert.IsFalse(node.IsReplica); + } - // Add data to server - resp = context.clusterTestUtils.GetServer(0).Execute("SET", "wxz", "1234"); - ClassicAssert.AreEqual("OK", (string)resp); + [Test, Order(4)] + public void ClusterResetFailsForMasterWithKeysInSlotsObjectStoreTest() + { + var node_count = 4; + context.CreateInstances(node_count); + context.CreateConnection(); + var (_, _) = context.clusterTestUtils.SimpleSetupCluster(node_count, 0, logger: context.logger); + context.kvPairsObj = new Dictionary>(); + context.PopulatePrimaryWithObjects(ref context.kvPairsObj, 16, 10, 0); - resp = context.clusterTestUtils.GetServer(0).Execute("GET", "wxz"); - ClassicAssert.AreEqual("1234", (string)resp); + var expectedSlotRange = new SlotRange(0, 4095); + var config = context.clusterTestUtils.ClusterNodes(0, context.logger); + var node = config.Nodes.First(); + ClassicAssert.AreEqual(expectedSlotRange, node.Slots[0]); + byte[] key = new byte[16]; + context.clusterTestUtils.RandomBytesRestrictedToSlot(ref key, node.Slots.First().From); + + VerifyClusterResetFails(true); + VerifyClusterResetFails(false); + + var nodeIds = context.clusterTestUtils.GetNodeIds(logger: context.logger); + ClassicAssert.AreEqual(4, config.Nodes.Count); + ClassicAssert.AreEqual(nodeIds[0], node.NodeId); + ClassicAssert.AreEqual(expectedSlotRange, node.Slots[0]); + ClassicAssert.IsFalse(node.IsReplica); + } + + [Test, Order(4)] + public void ClusterResetAfterFLushAllTest() + { + var node_count = 4; + 
context.CreateInstances(node_count); + context.CreateConnection(); + var (_, _) = context.clusterTestUtils.SimpleSetupCluster(node_count, 0, logger: context.logger); + context.kvPairsObj = new Dictionary>(); + context.PopulatePrimaryWithObjects(ref context.kvPairsObj, 16, 10, 0); + + var expectedSlotRange = new SlotRange(0, 4095); + var config = context.clusterTestUtils.ClusterNodes(0, context.logger); + var node = config.Nodes.First(); + ClassicAssert.AreEqual(expectedSlotRange, node.Slots[0]); + byte[] key = new byte[16]; + context.clusterTestUtils.RandomBytesRestrictedToSlot(ref key, node.Slots.First().From); + + VerifyClusterResetFails(true); + VerifyClusterResetFails(false); + + var nodeIds = context.clusterTestUtils.GetNodeIds(logger: context.logger); + ClassicAssert.AreEqual(4, config.Nodes.Count); + ClassicAssert.AreEqual(nodeIds[0], node.NodeId); + ClassicAssert.AreEqual(expectedSlotRange, node.Slots[0]); + ClassicAssert.IsFalse(node.IsReplica); + + context.clusterTestUtils.FlushAll(0, context.logger); + _ = context.clusterTestUtils.ClusterReset(0, soft: false, 10, context.logger); + + config = context.clusterTestUtils.ClusterNodes(0, context.logger); + node = config.Nodes.First(); + // Assert node 0 does not know anything about the cluster + ClassicAssert.AreEqual(1, config.Nodes.Count); + ClassicAssert.AreNotEqual(nodeIds[0], node.NodeId); + ClassicAssert.AreEqual(0, node.Slots.Count); + ClassicAssert.IsFalse(node.IsReplica); + } + + private void VerifyClusterResetFails(bool softReset = true) + { + var server = context.clusterTestUtils.GetServer(0); + var args = new List() { + "reset", + softReset ? 
"soft" : "hard", + "60" + }; + + try + { + _ = (string)server.Execute("cluster", args); } - catch (Exception ex) + catch (RedisServerException ex) { - context.logger?.LogError(ex, "An error occured at ClusterResetTest"); + ClassicAssert.AreEqual("ERR CLUSTER RESET can't be called with master nodes containing keys", ex.Message); } + } - // Add node back to the cluster - context.clusterTestUtils.SetConfigEpoch(0, 1, context.logger); - context.clusterTestUtils.Meet(0, 1, context.logger); + [Test, Order(4)] + public async Task ClusterResetDisposesGossipConnections() + { + var node_count = 3; + context.CreateInstances(node_count, metricsSamplingFrequency: 1); + context.CreateConnection(); + var endpoints = context.clusterTestUtils.GetEndpoints(); + for (int i = 0; i < endpoints.Length - 1; i++) + { + context.clusterTestUtils.Meet(i, i + 1, context.logger); + } - context.clusterTestUtils.WaitUntilNodeIsKnownByAllNodes(0, context.logger); + for (int i = 0; i < node_count; i++) + { + context.clusterTestUtils.WaitUntilNodeIsKnownByAllNodes(i); + } + + await Task.Delay(1000); + + var server = context.clusterTestUtils.GetServer(0); + var gossipConnections = GetStat(server, "Stats", "gossip_open_connections"); + ClassicAssert.AreEqual(node_count - 1, int.Parse(gossipConnections), "Expected one gossip connection per node."); + + context.clusterTestUtils.ClusterReset(0, soft: true); + + await Task.Delay(1000); + + gossipConnections = GetStat(server, "Stats", "gossip_open_connections"); + ClassicAssert.AreEqual("0", gossipConnections, "All gossip connections should be closed after a reset."); + ClassicAssert.AreEqual(1, context.clusterTestUtils.ClusterNodes(0).Nodes.Count(), "Expected the node to only know about itself after a reset."); + } + + private string GetStat(IServer server, string section, string statName) + { + return server.Info(section).FirstOrDefault(x => x.Key == section)?.FirstOrDefault(x => x.Key == statName).Value; } [Test, Order(5)] @@ -283,7 +408,83 @@ public 
void ClusterRestartNodeDropGossip() } } - [Test] + [Test, Order(7)] + public void ClusterClientList() + { + const int NodeCount = 4; + context.CreateInstances(NodeCount, enableAOF: true, MainMemoryReplication: true, CommitFrequencyMs: -1); + context.CreateConnection(); + _ = context.clusterTestUtils.SimpleSetupCluster(NodeCount / 2, 1, logger: context.logger); + + // Check that all nodes have 4 connections + var numWithTwoMasterConnections = 0; + var numWithTwoReplicaConnections = 0; + + // Every node should have 1 normal connection and either 2 master + 1 replica, or 2 replica + 1 master + for (var nodeIx = 0; nodeIx < NodeCount; nodeIx++) + { + var fullList = (string)context.clusterTestUtils.Execute((IPEndPoint)context.endpoints[nodeIx], "CLIENT", ["LIST"]); + var numNormal = fullList.Split("\n").Count(static x => x.Contains(" flags=N ")); + var numReplica = fullList.Split("\n").Count(static x => x.Contains(" flags=S ")); + var numMaster = fullList.Split("\n").Count(static x => x.Contains(" flags=M ")); + + ClassicAssert.AreEqual(1, numNormal); + ClassicAssert.IsTrue(numReplica >= 1 && numReplica <= 2); + ClassicAssert.IsTrue(numMaster >= 1 && numMaster <= 2); + + if (numMaster == 1) + { + ClassicAssert.AreEqual(2, numReplica); + numWithTwoReplicaConnections++; + } + else + { + ClassicAssert.AreEqual(1, numReplica); + numWithTwoMasterConnections++; + } + + var replicaList = (string)context.clusterTestUtils.Execute((IPEndPoint)context.endpoints[nodeIx], "CLIENT", ["LIST", "TYPE", "REPLICA"]); + var masterList = (string)context.clusterTestUtils.Execute((IPEndPoint)context.endpoints[nodeIx], "CLIENT", ["LIST", "TYPE", "MASTER"]); + + ClassicAssert.AreEqual(numReplica, replicaList.Split("\n").Length); + ClassicAssert.AreEqual(numMaster, masterList.Split("\n").Length); + } + + ClassicAssert.AreEqual(2, numWithTwoMasterConnections); + ClassicAssert.AreEqual(2, numWithTwoReplicaConnections); + } + + [Test, Order(7)] + public void ClusterClientKill() + { + const int 
NodeCount = 4; + context.CreateInstances(NodeCount, enableAOF: true, MainMemoryReplication: true, CommitFrequencyMs: -1); + context.CreateConnection(); + _ = context.clusterTestUtils.SimpleSetupCluster(NodeCount / 2, 1, logger: context.logger); + + var killedMaster = (int)context.clusterTestUtils.Execute((IPEndPoint)context.endpoints[0], "CLIENT", ["KILL", "TYPE", "MASTER"]); + var killedReplica = (int)context.clusterTestUtils.Execute((IPEndPoint)context.endpoints[0], "CLIENT", ["KILL", "TYPE", "REPLICA"]); + + ClassicAssert.IsTrue(killedMaster >= 1); + ClassicAssert.IsTrue(killedReplica >= 1); + } + + [Test, Order(7)] + public void ClusterClientKillSlave() + { + // Test SLAVE separately - it's equivalent to REPLICA, but needed for compatibility + + const int NodeCount = 4; + context.CreateInstances(NodeCount, enableAOF: true, MainMemoryReplication: true, CommitFrequencyMs: -1); + context.CreateConnection(); + _ = context.clusterTestUtils.SimpleSetupCluster(NodeCount / 2, 1, logger: context.logger); + + var killed = (int)context.clusterTestUtils.Execute((IPEndPoint)context.endpoints[0], "CLIENT", ["KILL", "TYPE", "SLAVE"]); + + ClassicAssert.IsTrue(killed >= 1); + } + + [Test, Order(8)] public void FailoverBadOptions() { var node_count = 4; @@ -318,7 +519,7 @@ public void FailoverBadOptions() } } - [Test] + [Test, Order(9)] public void ClusterFailoverBadOptions() { var node_count = 4; @@ -353,7 +554,7 @@ public void ClusterFailoverBadOptions() } } - [Test] + [Test, Order(10)] public void ClusterSetSlotBadOptions() { var node_count = 4; diff --git a/test/Garnet.test.cluster/ClusterMigrateTests.cs b/test/Garnet.test.cluster/ClusterMigrateTests.cs index f89aebdd91..41ffbb68d6 100644 --- a/test/Garnet.test.cluster/ClusterMigrateTests.cs +++ b/test/Garnet.test.cluster/ClusterMigrateTests.cs @@ -266,12 +266,12 @@ public void ClusterSimpleSlotInfo() var sourceIndex = context.clusterTestUtils.GetSourceNodeIndexFromSlot((ushort)slot, context.logger); var expectedKeyCount = 
context.clusterTestUtils.CountKeysInSlot(slot, context.logger); - ClassicAssert.AreEqual(expectedKeyCount, keyCount); + ClassicAssert.AreEqual(keyCount, expectedKeyCount); _ = context.clusterTestUtils.CountKeysInSlot(-1, context.logger); _ = context.clusterTestUtils.CountKeysInSlot(ushort.MaxValue, context.logger); var result = context.clusterTestUtils.GetKeysInSlot(sourceIndex, slot, expectedKeyCount, context.logger); - ClassicAssert.AreEqual(result.Count, keyCount); + ClassicAssert.AreEqual(keyCount, result.Count); _ = context.clusterTestUtils.GetKeysInSlot(-1, expectedKeyCount); _ = context.clusterTestUtils.GetKeysInSlot(ushort.MaxValue, expectedKeyCount); @@ -1717,12 +1717,16 @@ private void ClusterMigrateExpirationWithVaryingPayload(bool expiration, List<(b context.clusterTestUtils.SetConfigEpoch(dstNodeIndex, dstNodeIndex + 2, logger: context.logger); context.clusterTestUtils.Meet(srcNodeIndex, dstNodeIndex, logger: context.logger); context.clusterTestUtils.WaitUntilNodeIsKnown(dstNodeIndex, srcNodeIndex, logger: context.logger); + context.clusterTestUtils.WaitUntilNodeIsKnown(srcNodeIndex, dstNodeIndex, logger: context.logger); var config1 = context.clusterTestUtils.ClusterNodes(srcNodeIndex, logger: context.logger); var config2 = context.clusterTestUtils.ClusterNodes(dstNodeIndex, logger: context.logger); ClassicAssert.AreEqual(config1.GetBySlot(0).NodeId, config2.GetBySlot(0).NodeId); + ClassicAssert.AreEqual(Shards, config1.Nodes.Count); + ClassicAssert.AreEqual(Shards, config2.Nodes.Count); + ClassicAssert.AreEqual(config1.Nodes.Last().NodeId, config2.Nodes.First().NodeId); + ClassicAssert.AreEqual(config2.Nodes.Last().NodeId, config1.Nodes.First().NodeId); var db = context.clusterTestUtils.GetDatabase(); - foreach (var pair in data) ClassicAssert.IsTrue(db.StringSet(pair.Item1, pair.Item2)); diff --git a/test/Garnet.test.cluster/ClusterTestContext.cs b/test/Garnet.test.cluster/ClusterTestContext.cs index e00559c4a8..12afd1ec8a 100644 --- 
a/test/Garnet.test.cluster/ClusterTestContext.cs +++ b/test/Garnet.test.cluster/ClusterTestContext.cs @@ -57,8 +57,10 @@ public void TearDown() waiter?.Dispose(); clusterTestUtils?.Dispose(); loggerFactory?.Dispose(); - DisposeCluster(); - TestUtils.DeleteDirectory(TestFolder, true); + if (!Task.Run(() => DisposeCluster()).Wait(TimeSpan.FromSeconds(15))) + logger?.LogError("Timed out waiting for DisposeCluster"); + if (!Task.Run(() => TestUtils.DeleteDirectory(TestFolder, true)).Wait(TimeSpan.FromSeconds(15))) + logger?.LogError("Timed out waiting for DisposeCluster"); } /// @@ -108,7 +110,8 @@ public void CreateInstances( X509CertificateCollection certificates = null, ServerCredential clusterCreds = new ServerCredential(), AadAuthenticationSettings authenticationSettings = null, - bool disablePubSub = true) + bool disablePubSub = true, + int metricsSamplingFrequency = 0) { endpoints = TestUtils.GetEndPoints(shards, 7000); nodes = TestUtils.CreateGarnetCluster( @@ -138,7 +141,8 @@ public void CreateInstances( authUsername: clusterCreds.user, authPassword: clusterCreds.password, certificates: certificates, - authenticationSettings: authenticationSettings); + authenticationSettings: authenticationSettings, + metricsSamplingFrequency: metricsSamplingFrequency); foreach (var node in nodes) node.Start(); diff --git a/test/Garnet.test.cluster/ClusterTestUtils.cs b/test/Garnet.test.cluster/ClusterTestUtils.cs index 73480a7ceb..ffc419f539 100644 --- a/test/Garnet.test.cluster/ClusterTestUtils.cs +++ b/test/Garnet.test.cluster/ClusterTestUtils.cs @@ -670,7 +670,7 @@ public GarnetClientSession GetGarnetClientSession(int nodeIndex) if (gcsConnections[nodeIndex] == null) { var endpoint = GetEndPoint(nodeIndex).ToIPEndPoint(); - gcsConnections[nodeIndex] = new GarnetClientSession(endpoint.Address.ToString(), endpoint.Port); + gcsConnections[nodeIndex] = new GarnetClientSession(endpoint.Address.ToString(), endpoint.Port, new()); gcsConnections[nodeIndex].Connect(); } return 
gcsConnections[nodeIndex]; @@ -1994,6 +1994,23 @@ public int ClusterKeySlot(IPEndPoint endPoint, string key, ILogger logger = null } } + public void FlushAll(int nodeIndex, ILogger logger = null) + => FlushAll((IPEndPoint)endpoints[nodeIndex], logger); + + public void FlushAll(IPEndPoint endPoint, ILogger logger = null) + { + try + { + var server = redis.GetServer(endPoint); + server.FlushAllDatabases(); + } + catch (Exception ex) + { + logger?.LogError(ex, "An error has occured; FlushAllDatabases"); + Assert.Fail(); + } + } + public ClusterConfiguration ClusterNodes(int nodeIndex, ILogger logger = null) => ClusterNodes((IPEndPoint)endpoints[nodeIndex], logger); diff --git a/test/Garnet.test/CacheSizeTrackerTests.cs b/test/Garnet.test/CacheSizeTrackerTests.cs index b0c857e14f..577e5dfa27 100644 --- a/test/Garnet.test/CacheSizeTrackerTests.cs +++ b/test/Garnet.test/CacheSizeTrackerTests.cs @@ -25,7 +25,7 @@ public class CacheSizeTrackerTests public void Setup() { TestUtils.DeleteDirectory(TestUtils.MethodTestDir, wait: true); - server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, MemorySize: "2k", PageSize: "512", lowMemory: true, objectStoreIndexSize: "1k", objectStoreTotalMemorySize: "8k"); + server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, MemorySize: "2k", PageSize: "512", lowMemory: true, objectStoreIndexSize: "1k", objectStoreHeapMemorySize: "5k"); server.Start(); objStore = server.Provider.StoreWrapper.objectStore; cacheSizeTracker = server.Provider.StoreWrapper.objectStoreSizeTracker; diff --git a/test/Garnet.test/CustomRespCommandsDocs.json b/test/Garnet.test/CustomRespCommandsDocs.json new file mode 100644 index 0000000000..2b763f43cb --- /dev/null +++ b/test/Garnet.test/CustomRespCommandsDocs.json @@ -0,0 +1,150 @@ +[ + { + "Command": "NONE", + "Name": "DELIFM", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + 
"TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + }, + { + "Command": "NONE", + "Name": "MGETIFPM", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PREFIX", + "DisplayText": "prefix", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0, + "ArgumentFlags": "Multiple" + } + ] + }, + { + "Command": "NONE", + "Name": "MYDICTGET", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + } + ] + }, + { + "Command": "NONE", + "Name": "MYDICTSET", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + } + ] + }, + { + "Command": "NONE", + "Name": "READWRITETX", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "READKEY", + "DisplayText": "readkey", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "WRITEKEY1", + "DisplayText": "writeKey1", + "Type": "Key", + "KeySpecIndex": 1 + }, + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "WRITEKEY2", + "DisplayText": "writeKey2", + "Type": "Key", + "KeySpecIndex": 2 + } + ] + }, + { + "Command": "NONE", + "Name": "SETIFPM", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PREFIX", + "DisplayText": "prefix", + "Type": "String" + } + ] 
+ }, + { + "Command": "NONE", + "Name": "SETWPIFPGT", + "Arguments": [ + { + "TypeDiscriminator": "RespCommandKeyArgument", + "Name": "KEY", + "DisplayText": "key", + "Type": "Key", + "KeySpecIndex": 0 + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "VALUE", + "DisplayText": "value", + "Type": "String" + }, + { + "TypeDiscriminator": "RespCommandBasicArgument", + "Name": "PREFIX", + "DisplayText": "prefix", + "Type": "String" + } + ] + } +] \ No newline at end of file diff --git a/test/Garnet.test/Garnet.test.csproj b/test/Garnet.test/Garnet.test.csproj index 454cab426c..f60804d6c9 100644 --- a/test/Garnet.test/Garnet.test.csproj +++ b/test/Garnet.test/Garnet.test.csproj @@ -28,12 +28,6 @@ - - - PreserveNewest - - - @@ -66,6 +60,18 @@ + + PreserveNewest + + + + + + PreserveNewest + + + PreserveNewest + PreserveNewest diff --git a/test/Garnet.test/GarnetBitmapTests.cs b/test/Garnet.test/GarnetBitmapTests.cs index 25d910f929..88ad67c31c 100644 --- a/test/Garnet.test/GarnetBitmapTests.cs +++ b/test/Garnet.test/GarnetBitmapTests.cs @@ -168,7 +168,7 @@ public void BitmapSetGetBitTest_LTM(bool preSet) server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: (bitmapBytes << 1).ToString(), + MemorySize: (bitmapBytes << 2).ToString(), PageSize: (bitmapBytes << 1).ToString()); server.Start(); using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); @@ -450,7 +450,7 @@ public void BitmapBitCountTest_LTM() server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: (bitmapBytes << 1).ToString(), + MemorySize: (bitmapBytes << 2).ToString(), PageSize: (bitmapBytes << 1).ToString()); server.Start(); using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); @@ -647,7 +647,7 @@ public void BitmapBitPosTest_LTM() server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: 
(bitmapBytes << 1).ToString(), + MemorySize: (bitmapBytes << 2).ToString(), PageSize: (bitmapBytes << 1).ToString()); server.Start(); using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); @@ -1271,7 +1271,7 @@ public void BitmapBitfieldGetTest_LTM() server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: (bitmapBytes << 1).ToString(), + MemorySize: (bitmapBytes << 2).ToString(), PageSize: (bitmapBytes << 1).ToString()); //MemorySize: "16g", //PageSize: "32m"); @@ -1472,7 +1472,7 @@ public void BitmapBitfieldSetTest_LTM() server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: (bitmapBytes << 1).ToString(), + MemorySize: (bitmapBytes << 2).ToString(), PageSize: (bitmapBytes << 1).ToString()); //MemorySize: "16g", //PageSize: "32m"); @@ -1938,7 +1938,7 @@ public void BitmapBitfieldIncrTest_LTM() server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: (bitmapBytes << 1).ToString(), + MemorySize: (bitmapBytes << 2).ToString(), PageSize: (bitmapBytes << 1).ToString()); //MemorySize: "16g", //PageSize: "32m"); diff --git a/test/Garnet.test/GarnetServerConfigTests.cs b/test/Garnet.test/GarnetServerConfigTests.cs index cdf2a062f7..318a751624 100644 --- a/test/Garnet.test/GarnetServerConfigTests.cs +++ b/test/Garnet.test/GarnetServerConfigTests.cs @@ -82,7 +82,7 @@ public void ImportExportConfigLocal() // No import path, include command line args, export to file // Check values from command line override values from defaults.conf static string GetFullExtensionBinPath(string testProjectName) => Path.GetFullPath(testProjectName, TestUtils.RootTestsProjectPath); - var args = new string[] { "--config-export-path", configPath, "-p", "4m", "-m", "128m", "-s", "2g", "--recover", "--port", "53", "--reviv-obj-bin-record-count", "2", "--reviv-fraction", "0.5", "--extension-bin-paths", 
$"{GetFullExtensionBinPath("Garnet.test")},{GetFullExtensionBinPath("Garnet.test.cluster")}" }; + var args = new string[] { "--config-export-path", configPath, "-p", "4m", "-m", "128m", "-s", "2g", "--recover", "--port", "53", "--reviv-obj-bin-record-count", "2", "--reviv-fraction", "0.5", "--extension-bin-paths", $"{GetFullExtensionBinPath("Garnet.test")},{GetFullExtensionBinPath("Garnet.test.cluster")}", "--loadmodulecs", $"{Assembly.GetExecutingAssembly().Location}" }; parseSuccessful = ServerSettingsManager.TryParseCommandLineArguments(args, out options, out invalidOptions); ClassicAssert.IsTrue(parseSuccessful); ClassicAssert.AreEqual(invalidOptions.Count, 0); @@ -95,6 +95,8 @@ public void ImportExportConfigLocal() ClassicAssert.IsTrue(options.Recover); ClassicAssert.IsTrue(File.Exists(configPath)); ClassicAssert.AreEqual(2, options.ExtensionBinPaths.Count()); + ClassicAssert.AreEqual(1, options.LoadModuleCS.Count()); + ClassicAssert.AreEqual(Assembly.GetExecutingAssembly().Location, options.LoadModuleCS.First()); // Import from previous export command, no command line args // Check values from import path override values from default.conf diff --git a/test/Garnet.test/HyperLogLogTests.cs b/test/Garnet.test/HyperLogLogTests.cs index 0aa761d2b3..c32f1cf5cb 100644 --- a/test/Garnet.test/HyperLogLogTests.cs +++ b/test/Garnet.test/HyperLogLogTests.cs @@ -571,13 +571,13 @@ public void HyperLogLogPFADD_LTM(int seqSize) if (seqSize < 128) server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: "512", + MemorySize: "1024", PageSize: "512"); else server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: "16384", - PageSize: "16384"); + MemorySize: "32k", + PageSize: "16k"); server.Start(); using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); @@ -689,7 +689,7 @@ public void HyperLogLogTestPFMERGE_LTM_SparseToSparse() server.Dispose(); server = 
TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: "512", + MemorySize: "1024", PageSize: "512"); server.Start(); @@ -798,8 +798,8 @@ public void HyperLogLogTestPFMERGE_LTM_SparseToDense(bool reverse) { server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, - MemorySize: "16384", - PageSize: "16384"); + MemorySize: "32k", + PageSize: "16k"); server.Start(); using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); @@ -908,8 +908,8 @@ public void HyperLogLogTestPFMERGE_LTM_DenseToDense() server.Dispose(); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, lowMemory: true, - MemorySize: "16384", - PageSize: "16384"); + MemorySize: "32k", + PageSize: "16k"); server.Start(); using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); diff --git a/test/Garnet.test/LuaScriptRunnerTests.cs b/test/Garnet.test/LuaScriptRunnerTests.cs index e02c29850f..a9e07c0601 100644 --- a/test/Garnet.test/LuaScriptRunnerTests.cs +++ b/test/Garnet.test/LuaScriptRunnerTests.cs @@ -92,7 +92,7 @@ public void CanRunScript() { runner.Compile(); var res = runner.Run(keys, args); - ClassicAssert.AreEqual("arg2", res); + ClassicAssert.AreEqual("arg1", res); } // Run code with errors diff --git a/test/Garnet.test/LuaScriptTests.cs b/test/Garnet.test/LuaScriptTests.cs index 5d3a67fa12..a52dcbe158 100644 --- a/test/Garnet.test/LuaScriptTests.cs +++ b/test/Garnet.test/LuaScriptTests.cs @@ -309,5 +309,152 @@ public void UseEvalShaLightClient() ClassicAssert.IsTrue(strKeyValue == valueKey); } } + + [Test] + public void SuccessfulStatusReturn() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var statusReplyScript = "return redis.status_reply('Success')"; + var result = db.ScriptEvaluate(statusReplyScript); + ClassicAssert.AreEqual((RedisValue)result, "Success"); + var directReplyScript = "return { ok = 'Success' }"; + result = 
db.ScriptEvaluate(directReplyScript); + ClassicAssert.AreEqual((RedisValue)result, "Success"); + } + + [Test] + public void FailureStatusReturn() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var statusReplyScript = "return redis.error_reply('Failure')"; + try + { + _ = db.ScriptEvaluate(statusReplyScript); + } + catch (RedisServerException ex) + { + ClassicAssert.AreEqual(ex.Message, "Failure"); + } + var directReplyScript = "return { err = 'Failure' }"; + try + { + _ = db.ScriptEvaluate(directReplyScript); + } + catch (RedisServerException ex) + { + ClassicAssert.AreEqual(ex.Message, "Failure"); + } + } + + [Test] + public void ComplexLuaTest1() + { + var script = """ +local setArgs = {} +for _, key in ipairs(KEYS) do + table.insert(setArgs, key) +end +unpack(KEYS) +return redis.status_reply(table.concat(setArgs)) +"""; + + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var response = db.ScriptEvaluate(script, ["key1", "key2"], ["value", 1, 60_000]); + ClassicAssert.AreEqual("key1key2", (string)response); + response = db.ScriptEvaluate(script, ["key1", "key2"], ["value"]); + ClassicAssert.AreEqual("key1key2", (string)response); + response = db.ScriptEvaluate(script, ["key1"], ["value", 1, 60_000]); + ClassicAssert.AreEqual("key1", (string)response); + response = db.ScriptEvaluate(script, ["key1", "key2"]); + ClassicAssert.AreEqual("key1key2", (string)response); + response = db.ScriptEvaluate(script, ["key1", "key2", "key3", "key4"], ["value", 1]); + ClassicAssert.AreEqual("key1key2key3key4", (string)response); + response = db.ScriptEvaluate(script, [], ["value", 1, 60_000]); + ClassicAssert.AreEqual("", (string)response); + } + + [Test] + public void ComplexLuaTest2() + { + var script1 = """ +local function callpexpire(ttl) + for _, key in ipairs(KEYS) do + redis.call("pexpire", key, ttl) + end +end + +local function callgetrange() + 
local offset = tonumber(ARGV[2]) + + for _, key in ipairs(KEYS) do + if redis.call("getrange", key, 0, offset-1) ~= string.sub(ARGV[1], 1, offset) then + return false + end + end + return true +end + +local setArgs = {} +for _, key in ipairs(KEYS) do + table.insert(setArgs, key) + table.insert(setArgs, ARGV[1]) +end + +if redis.call("msetnx", unpack(setArgs)) ~= 1 then + if callgetrange() == false then + return false + end + redis.call("mset", unpack(setArgs)) +end + +callpexpire(ARGV[3]) +return redis.status_reply("OK") +"""; + + var script2 = """ +local values = redis.call("mget", unpack(KEYS)) +for i, _ in ipairs(KEYS) do + if values[i] ~= ARGV[1] then + return false + end +end + +redis.call("del", unpack(KEYS)) + +return redis.status_reply("OK") +"""; + + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var response1 = db.ScriptEvaluate(script1, ["key1", "key2"], ["foo", 3, 60_000]); + ClassicAssert.AreEqual("OK", (string)response1); + var response2 = db.ScriptEvaluate(script2, ["key3"], ["foo"]); + ClassicAssert.AreEqual(false, (bool)response2); + var response3 = db.ScriptEvaluate(script2, ["key1", "key2"], ["foo"]); + ClassicAssert.AreEqual("OK", (string)response3); + } + + [Test] + public void ComplexLuaTest3() + { + var script1 = """ +return redis.call("mget", unpack(KEYS)) +"""; + + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + for (int i = 0; i < 10; i++) + { + var response1 = (string[])db.ScriptEvaluate(script1, ["key1", "key2"]); + ClassicAssert.AreEqual(2, response1.Length); + foreach (var item in response1) + { + ClassicAssert.AreEqual(null, item); + } + } + } } } \ No newline at end of file diff --git a/test/Garnet.test/Resp/ACL/RespCommandTests.cs b/test/Garnet.test/Resp/ACL/RespCommandTests.cs index 7988bc9e53..17de3946f4 100644 --- a/test/Garnet.test/Resp/ACL/RespCommandTests.cs +++ b/test/Garnet.test/Resp/ACL/RespCommandTests.cs 
@@ -83,7 +83,7 @@ public void AllCommandsCovered() ClassicAssert.IsTrue(RespCommandsInfo.TryGetRespCommandNames(out IReadOnlySet advertisedCommands), "Couldn't get advertised RESP commands"); // TODO: See if these commands could be identified programmatically - IEnumerable withOnlySubCommands = ["ACL", "CLUSTER", "CONFIG", "LATENCY", "MEMORY", "MODULE"]; + IEnumerable withOnlySubCommands = ["ACL", "CLIENT", "CLUSTER", "CONFIG", "LATENCY", "MEMORY", "MODULE"]; IEnumerable notCoveredByACLs = allInfo.Where(static x => x.Value.Flags.HasFlag(RespCommandFlags.NoAuth)).Select(static kv => kv.Key); // Check tests against RespCommandsInfo @@ -106,7 +106,7 @@ public void AllCommandsCovered() .Where(cmd => !notCoveredByACLs.Contains(cmd.ToString().Replace("_", ""), StringComparer.OrdinalIgnoreCase)); IEnumerable notCovered = testableValues.Where(cmd => !covered.Contains(cmd.ToString().Replace("_", ""), StringComparer.OrdinalIgnoreCase)); - ClassicAssert.IsEmpty(notCovered, $"Commands in RespCOmmand not covered by ACL Tests:{Environment.NewLine}{string.Join(Environment.NewLine, notCovered.OrderBy(static x => x))}"); + ClassicAssert.IsEmpty(notCovered, $"Commands in RespCommand not covered by ACL Tests:{Environment.NewLine}{string.Join(Environment.NewLine, notCovered.OrderBy(static x => x))}"); } } @@ -208,7 +208,7 @@ static async Task DoAclLoadAsync(GarnetClient client) } catch (Exception e) { - if (e.Message != "ERR Cannot find ACL configuration file ''") + if (e.Message != "ERR This Garnet instance is not configured to use an ACL file. Please restart server with --acl-file option.") { throw; } @@ -234,7 +234,7 @@ static async Task DoAclSaveAsync(GarnetClient client) } catch (Exception e) { - if (e.Message != "ERR ACL configuration file not set.") + if (e.Message != "ERR This Garnet instance is not configured to use an ACL file. 
Please restart server with --acl-file option.") { throw; } @@ -639,19 +639,97 @@ static async Task DoBitPosStartEndByteAsync(GarnetClient client) } [Test] - public async Task ClientACLsAsync() + public async Task ClientIdACLsAsync() { - // TODO: client isn't really implemented looks like, so this is mostly a placeholder in case it gets implemented correctly + await CheckCommandsAsync( + "CLIENT ID", + [DoClientIdAsync] + ); + static async Task DoClientIdAsync(GarnetClient client) + { + long val = await client.ExecuteForLongResultAsync("CLIENT", ["ID"]); + ClassicAssert.AreNotEqual(0, val); + } + } + + [Test] + public async Task ClientInfoACLsAsync() + { await CheckCommandsAsync( - "CLIENT", - [DoClientAsync] + "CLIENT INFO", + [DoClientInfoAsync] ); - static async Task DoClientAsync(GarnetClient client) + static async Task DoClientInfoAsync(GarnetClient client) { - string val = await client.ExecuteForStringResultAsync("CLIENT"); - ClassicAssert.AreEqual("OK", val); + string val = await client.ExecuteForStringResultAsync("CLIENT", ["INFO"]); + ClassicAssert.IsNotEmpty(val); + } + } + + [Test] + public async Task ClientListACLsAsync() + { + await CheckCommandsAsync( + "CLIENT LIST", + [DoClientListAsync, DoClientListTypeAsync, DoClientListIdAsync, DoClientListIdsAsync] + ); + + static async Task DoClientListAsync(GarnetClient client) + { + string val = await client.ExecuteForStringResultAsync("CLIENT", ["LIST"]); + ClassicAssert.IsNotEmpty(val); + } + + static async Task DoClientListTypeAsync(GarnetClient client) + { + string val = await client.ExecuteForStringResultAsync("CLIENT", ["LIST", "TYPE", "NORMAL"]); + ClassicAssert.IsNotEmpty(val); + } + + static async Task DoClientListIdAsync(GarnetClient client) + { + string val = await client.ExecuteForStringResultAsync("CLIENT", ["LIST", "ID", "1"]); + ClassicAssert.IsNotEmpty(val); + } + + static async Task DoClientListIdsAsync(GarnetClient client) + { + string val = await client.ExecuteForStringResultAsync("CLIENT", 
["LIST", "ID", "1", "2"]); + ClassicAssert.IsNotEmpty(val); + } + } + + [Test] + public async Task ClientKillACLsAsync() + { + await CheckCommandsAsync( + "CLIENT KILL", + [DoClientKillAsync, DoClientFilterAsync] + ); + + static async Task DoClientKillAsync(GarnetClient client) + { + try + { + _ = await client.ExecuteForStringResultAsync("CLIENT", ["KILL", "foo"]); + } + catch (Exception ex) + { + if (ex.Message.Equals("ERR No such client")) + { + return; + } + + throw; + } + } + + static async Task DoClientFilterAsync(GarnetClient client) + { + var count = await client.ExecuteForLongResultAsync("CLIENT", ["KILL", "ID", "123"]); + ClassicAssert.AreEqual(0, count); } } @@ -2108,6 +2186,34 @@ static async Task DoCommandInfoMultiAsync(GarnetClient client) } } + [Test] + public async Task CommandDocsACLsAsync() + { + await CheckCommandsAsync( + "COMMAND DOCS", + [DoCommandDocsAsync, DoCommandDocsOneAsync, DoCommandDocsMultiAsync], + skipPermitted: true + ); + + static async Task DoCommandDocsAsync(GarnetClient client) + { + // COMMAND|DOCS returns an array of arrays, which GarnetClient doesn't deal with + await client.ExecuteForStringResultAsync("COMMAND", ["DOCS"]); + } + + static async Task DoCommandDocsOneAsync(GarnetClient client) + { + // COMMAND|DOCS returns an array of arrays, which GarnetClient doesn't deal with + await client.ExecuteForStringResultAsync("COMMAND", ["DOCS", "GET"]); + } + + static async Task DoCommandDocsMultiAsync(GarnetClient client) + { + // COMMAND|DOCS returns an array of arrays, which GarnetClient doesn't deal with + await client.ExecuteForStringResultAsync("COMMAND", ["DOCS", "GET", "SET", "APPEND"]); + } + } + [Test] public async Task CommitAOFACLsAsync() { @@ -2526,8 +2632,6 @@ static async Task DoExistsMultiAsync(GarnetClient client) [Test] public async Task ExpireACLsAsync() { - // TODO: expire doesn't support combinations of flags (XX GT, XX LT are legal) so those will need to be tested when implemented - await CheckCommandsAsync( 
"EXPIRE", [DoExpireAsync, DoExpireNXAsync, DoExpireXXAsync, DoExpireGTAsync, DoExpireLTAsync] @@ -2564,6 +2668,96 @@ static async Task DoExpireLTAsync(GarnetClient client) } } + [Test] + public async Task ExpireAtACLsAsync() + { + await CheckCommandsAsync( + "EXPIREAT", + [DoExpireAsync, DoExpireNXAsync, DoExpireXXAsync, DoExpireGTAsync, DoExpireLTAsync] + ); + + + static async Task DoExpireAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("EXPIREAT", ["foo", expireTimestamp]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireNXAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("EXPIREAT", ["foo", "10", "NX"]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireXXAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("EXPIREAT", ["foo", "10", "XX"]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireGTAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("EXPIREAT", ["foo", "10", "GT"]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireLTAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("EXPIREAT", ["foo", "10", "LT"]); + ClassicAssert.AreEqual(0, val); + } + } + + [Test] + public async Task PExpireAtACLsAsync() + { + await CheckCommandsAsync( + "PEXPIREAT", + [DoExpireAsync, DoExpireNXAsync, DoExpireXXAsync, DoExpireGTAsync, DoExpireLTAsync] + ); + + + static async Task 
DoExpireAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeMilliseconds().ToString(); + long val = await client.ExecuteForLongResultAsync("PEXPIREAT", ["foo", expireTimestamp]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireNXAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("PEXPIREAT", ["foo", "10", "NX"]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireXXAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("PEXPIREAT", ["foo", "10", "XX"]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireGTAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("PEXPIREAT", ["foo", "10", "GT"]); + ClassicAssert.AreEqual(0, val); + } + + static async Task DoExpireLTAsync(GarnetClient client) + { + var expireTimestamp = DateTimeOffset.UtcNow.AddMinutes(1).ToUnixTimeSeconds().ToString(); + long val = await client.ExecuteForLongResultAsync("PEXPIREAT", ["foo", "10", "LT"]); + ClassicAssert.AreEqual(0, val); + } + } + [Test] public async Task FailoverACLsAsync() { @@ -3402,6 +3596,21 @@ static async Task DoLPopCountAsync(GarnetClient client) } } + [Test] + public async Task LPosACLsAsync() + { + await CheckCommandsAsync( + "LPOS", + [DoLPosAsync] + ); + + static async Task DoLPosAsync(GarnetClient client) + { + string val = await client.ExecuteForStringResultAsync("LPOS", ["foo", "a"]); + ClassicAssert.IsNull(val); + } + } + [Test] public async Task LPushACLsAsync() { @@ -3776,7 +3985,36 @@ static async Task DoMigrateAsync(GarnetClient client) try { await client.ExecuteForStringResultAsync("MIGRATE", ["127.0.0.1", 
"9999", "KEY", "0", "1000"]); - Assert.Fail("Shouldn't succeed, no replicas are attached"); + Assert.Fail("Shouldn't be reachable, cluster isn't enabled"); + } + catch (Exception e) + { + if (e.Message == "ERR This instance has cluster support disabled") + { + return; + } + + throw; + } + } + } + + [Test] + public async Task PurgeBPACLsAsync() + { + // Uses exceptions for control flow, as we're not setting up replicas here + + await CheckCommandsAsync( + "PURGEBP", + [DoPurgeBPClusterAsync, DoPurgeBPAsync] + ); + + static async Task DoPurgeBPClusterAsync(GarnetClient client) + { + try + { + await client.ExecuteForStringResultAsync("PURGEBP", ["MigrationManager"]); + Assert.Fail("Shouldn't be reachable, cluster isn't enabled"); } catch (Exception e) { @@ -3788,6 +4026,12 @@ static async Task DoMigrateAsync(GarnetClient client) throw; } } + + static async Task DoPurgeBPAsync(GarnetClient client) + { + string val = await client.ExecuteForStringResultAsync("PURGEBP", ["ServerListener"]); + ClassicAssert.AreEqual("GC completed for ServerListener", val); + } } [Test] @@ -4195,15 +4439,45 @@ static async Task DoRegisterCSAsync(GarnetClient client) } } + [Test] + public async Task ExpireTimeACLsAsync() + { + await CheckCommandsAsync( + "EXPIRETIME", + [DoExpireTimeAsync] + ); + + static async Task DoExpireTimeAsync(GarnetClient client) + { + var val = await client.ExecuteForLongResultAsync("EXPIRETIME", ["foo"]); + ClassicAssert.AreEqual(-2, val); + } + } + + [Test] + public async Task PExpireTimeACLsAsync() + { + await CheckCommandsAsync( + "PEXPIRETIME", + [DoPExpireTimeAsync] + ); + + static async Task DoPExpireTimeAsync(GarnetClient client) + { + var val = await client.ExecuteForLongResultAsync("PEXPIRETIME", ["foo"]); + ClassicAssert.AreEqual(-2, val); + } + } + [Test] public async Task RenameACLsAsync() { await CheckCommandsAsync( "RENAME", - [DoPTTLAsync] + [DoRENAMEAsync] ); - static async Task DoPTTLAsync(GarnetClient client) + static async Task 
DoRENAMEAsync(GarnetClient client) { try { @@ -4222,6 +4496,33 @@ static async Task DoPTTLAsync(GarnetClient client) } } + [Test] + public async Task RenameNxACLsAsync() + { + await CheckCommandsAsync( + "RENAMENX", + [DoRENAMENXAsync] + ); + + static async Task DoRENAMENXAsync(GarnetClient client) + { + try + { + await client.ExecuteForStringResultAsync("RENAMENX", ["foo", "bar"]); + Assert.Fail("Shouldn't succeed, key doesn't exist"); + } + catch (Exception e) + { + if (e.Message == "ERR no such key") + { + return; + } + + throw; + } + } + } + [Test] public async Task ReplicaOfACLsAsync() { diff --git a/test/Garnet.test/RespAdminCommandsTests.cs b/test/Garnet.test/RespAdminCommandsTests.cs index d3deb1cefc..19963ba9b3 100644 --- a/test/Garnet.test/RespAdminCommandsTests.cs +++ b/test/Garnet.test/RespAdminCommandsTests.cs @@ -306,7 +306,7 @@ static void ValidateServerData(IDatabase db, string strKey, string strValue, str [Test] [TestCase(63, 15, 1)] - [TestCase(63, 1, 1)] + [TestCase(63, 2, 1)] [TestCase(16, 16, 1)] [TestCase(5, 64, 1)] public void SeSaveRecoverMultipleObjectsTest(int memorySize, int recoveryMemorySize, int pageSize) @@ -335,7 +335,7 @@ public void SeSaveRecoverMultipleObjectsTest(int memorySize, int recoveryMemoryS } server.Dispose(false); - server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, tryRecover: true, lowMemory: true, MemorySize: sizeToString(recoveryMemorySize), PageSize: sizeToString(pageSize), objectStoreTotalMemorySize: "64k"); + server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, tryRecover: true, lowMemory: true, MemorySize: sizeToString(recoveryMemorySize), PageSize: sizeToString(pageSize), objectStoreHeapMemorySize: "64k"); server.Start(); ClassicAssert.LessOrEqual(server.Provider.StoreWrapper.objectStore.MaxAllocatedPageCount, (recoveryMemorySize / pageSize) + 1); @@ -363,7 +363,7 @@ public void SeSaveRecoverMultipleKeysTest(string memorySize, string recoveryMemo bool disableObj = true; server.Dispose(); - 
server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, DisableObjects: disableObj, lowMemory: true, MemorySize: memorySize, PageSize: "1k", enableAOF: true); + server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, DisableObjects: disableObj, lowMemory: true, MemorySize: memorySize, PageSize: "512", enableAOF: true); server.Start(); using (var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig(allowAdmin: true))) @@ -402,7 +402,7 @@ public void SeSaveRecoverMultipleKeysTest(string memorySize, string recoveryMemo } server.Dispose(false); - server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, DisableObjects: disableObj, tryRecover: true, lowMemory: true, MemorySize: recoveryMemorySize, PageSize: "1k", enableAOF: true); + server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, DisableObjects: disableObj, tryRecover: true, lowMemory: true, MemorySize: recoveryMemorySize, PageSize: "512", enableAOF: true); server.Start(); using (var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig(allowAdmin: true))) diff --git a/test/Garnet.test/RespCommandTests.cs b/test/Garnet.test/RespCommandTests.cs index 3507463c9f..b0c7a5bb93 100644 --- a/test/Garnet.test/RespCommandTests.cs +++ b/test/Garnet.test/RespCommandTests.cs @@ -7,6 +7,7 @@ using System.Linq; using System.Reflection; using System.Runtime.InteropServices; +using Garnet.common; using Garnet.server; using NUnit.Framework; using NUnit.Framework.Legacy; @@ -23,17 +24,27 @@ public class RespCommandTests GarnetServer server; private string extTestDir; private IReadOnlyDictionary respCommandsInfo; + private IReadOnlyDictionary respSubCommandsInfo; + private IReadOnlyDictionary respCommandsDocs; + private IReadOnlyDictionary respSubCommandsDocs; private IReadOnlyDictionary respCustomCommandsInfo; + private IReadOnlyDictionary respCustomCommandsDocs; [SetUp] public void Setup() { TestUtils.DeleteDirectory(TestUtils.MethodTestDir, wait: true); extTestDir = 
Path.Combine(TestUtils.MethodTestDir, "test"); - ClassicAssert.IsTrue(RespCommandsInfo.TryGetRespCommandsInfo(out respCommandsInfo, externalOnly: true)); + ClassicAssert.IsTrue(RespCommandsInfo.TryGetRespCommandsInfo(out respCommandsInfo, externalOnly: false)); + ClassicAssert.IsTrue(RespCommandsInfo.TryGetRespSubCommandsInfo(out respSubCommandsInfo, externalOnly: false)); + ClassicAssert.IsTrue(RespCommandDocs.TryGetRespCommandsDocs(out respCommandsDocs, externalOnly: false)); + ClassicAssert.IsTrue(RespCommandDocs.TryGetRespSubCommandsDocs(out respSubCommandsDocs, externalOnly: false)); ClassicAssert.IsTrue(TestUtils.TryGetCustomCommandsInfo(out respCustomCommandsInfo)); + ClassicAssert.IsTrue(TestUtils.TryGetCustomCommandsDocs(out respCustomCommandsDocs)); ClassicAssert.IsNotNull(respCommandsInfo); + ClassicAssert.IsNotNull(respCommandsDocs); ClassicAssert.IsNotNull(respCustomCommandsInfo); + ClassicAssert.IsNotNull(respCustomCommandsDocs); server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, disablePubSub: true, extensionBinPaths: [extTestDir]); server.Start(); @@ -113,6 +124,52 @@ public void CommandInfoTest() } } + /// + /// Test COMMAND DOCS command + /// + [Test] + public void CommandDocsTest() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + // Get all commands using COMMAND DOCS command + var results = (RedisResult[])db.Execute("COMMAND", "DOCS"); + + ClassicAssert.IsNotNull(results); + ClassicAssert.AreEqual(respCommandsDocs.Count, results.Length / 2); + + // Register custom commands + var customCommandsRegistered = RegisterCustomCommands(); + + // Dynamically register custom commands + var customCommandsRegisteredDyn = DynamicallyRegisterCustomCommands(db); + + // Get all commands (including custom commands) using COMMAND DOCS command + results = (RedisResult[])db.Execute("COMMAND", "DOCS"); + + ClassicAssert.IsNotNull(results); + var expectedCommands = + respCommandsDocs.Keys + 
.Union(customCommandsRegistered) + .Union(customCommandsRegisteredDyn).OrderBy(c => c); + + var cmdNameToResult = new Dictionary(); + for (var i = 0; i < results.Length; i += 2) + { + cmdNameToResult.Add(results[i].ToString(), results[i + 1]); + } + + var actualCommands = cmdNameToResult.Keys.OrderBy(c => c); + CollectionAssert.AreEqual(expectedCommands, actualCommands); + + foreach (var cmdName in respCommandsDocs.Keys.Union(customCommandsRegistered).Union(customCommandsRegisteredDyn)) + { + ClassicAssert.Contains(cmdName, cmdNameToResult.Keys); + VerifyCommandDocs(cmdName, cmdNameToResult[cmdName]); + } + } + /// /// Test COMMAND COUNT command /// @@ -166,8 +223,8 @@ public void CommandUnknownSubcommandTest() /// Test COMMAND INFO [command-name [command-name ...]] /// [Test] - [TestCase(["GET", "SET", "COSCAN"])] - [TestCase(["get", "set", "coscan"])] + [TestCase(["GET", "SET", "COSCAN", "ACL|LOAD", "WATCH|MS"])] + [TestCase(["get", "set", "coscan", "acl|load", "watch|ms"])] public void CommandInfoWithCommandNamesTest(params string[] commands) { using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); var db = redis.GetDatabase(0); @@ -188,6 +245,33 @@ public void CommandInfoWithCommandNamesTest(params string[] commands) } } + /// + /// Test COMMAND DOCS [command-name [command-name ...]] + /// + [Test] + [TestCase(["GET", "SET", "COSCAN", "ACL|LOAD", "WATCH|MS"])] + [TestCase(["get", "set", "coscan", "acl|load", "watch|ms"])] + public void CommandDocsWithCommandNamesTest(params string[] commands) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var args = new object[] { "DOCS" }.Union(commands).ToArray(); + + // Get basic commands using COMMAND DOCS command + var results = (RedisResult[])db.Execute("COMMAND", args); + + ClassicAssert.IsNotNull(results); + ClassicAssert.AreEqual(commands.Length, results.Length / 2); + + for (var i = 0; i < commands.Length; i++) + { + ClassicAssert.AreEqual(commands[i].ToUpper(), 
results[2 * i].ToString()); + var info = results[(2 * i) + 1]; + VerifyCommandDocs(commands[i], info); + } + } + /// /// Test COMMAND INFO with custom commands /// @@ -220,6 +304,39 @@ public void CommandInfoWithCustomCommandNamesTest(params string[] commands) } } + /// + /// Test COMMAND DOCS with custom commands + /// + [Test] + [TestCase(["SETIFPM", "MYDICTSET", "MGETIFPM", "READWRITETX", "MYDICTGET"])] + [TestCase(["setifpm", "mydictset", "mgetifpm", "readwritetx", "mydictget"])] + public void CommandDocsWithCustomCommandNamesTest(params string[] commands) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + // Register custom commands + RegisterCustomCommands(); + + // Dynamically register custom commands + DynamicallyRegisterCustomCommands(db); + + var args = new object[] { "DOCS" }.Union(commands).ToArray(); + + // Get basic commands using COMMAND DOCS command + var results = (RedisResult[])db.Execute("COMMAND", args); + + ClassicAssert.IsNotNull(results); + ClassicAssert.AreEqual(commands.Length, results.Length / 2); + + for (var i = 0; i < commands.Length; i++) + { + ClassicAssert.AreEqual(commands[i].ToUpper(), results[2 * i].ToString()); + var info = results[(2 * i) + 1]; + VerifyCommandDocs(commands[i], info); + } + } + [Test] public void AofIndependentCommandsTest() { @@ -228,7 +345,6 @@ public void AofIndependentCommandsTest() RespCommand.PING, RespCommand.SELECT, RespCommand.ECHO, - RespCommand.CLIENT, RespCommand.MONITOR, RespCommand.MODULE_LOADCS, RespCommand.REGISTERCS, @@ -244,9 +360,15 @@ RespCommand.ACL_SETUSER, RespCommand.ACL_USERS, RespCommand.ACL_WHOAMI, + // Client + RespCommand.CLIENT_ID, + RespCommand.CLIENT_INFO, + RespCommand.CLIENT_LIST, + RespCommand.CLIENT_KILL, // Command RespCommand.COMMAND, RespCommand.COMMAND_COUNT, + RespCommand.COMMAND_DOCS, RespCommand.COMMAND_INFO, RespCommand.MEMORY_USAGE, // Config @@ -273,14 +395,14 @@ 
private string[] RegisterCustomCommands() var registeredCommands = new[] { "SETIFPM", "MYDICTSET", "MGETIFPM" }; var factory = new MyDictFactory(); - server.Register.NewCommand("SETIFPM", CommandType.ReadModifyWrite, new SetIfPMCustomCommand(), respCustomCommandsInfo["SETIFPM"]); - server.Register.NewCommand("MYDICTSET", CommandType.ReadModifyWrite, factory, new MyDictSet(), respCustomCommandsInfo["MYDICTSET"]); - server.Register.NewTransactionProc("MGETIFPM", () => new MGetIfPM(), respCustomCommandsInfo["MGETIFPM"]); + server.Register.NewCommand("SETIFPM", CommandType.ReadModifyWrite, new SetIfPMCustomCommand(), respCustomCommandsInfo["SETIFPM"], respCustomCommandsDocs["SETIFPM"]); + server.Register.NewCommand("MYDICTSET", CommandType.ReadModifyWrite, factory, new MyDictSet(), respCustomCommandsInfo["MYDICTSET"], respCustomCommandsDocs["MYDICTSET"]); + server.Register.NewTransactionProc("MGETIFPM", () => new MGetIfPM(), respCustomCommandsInfo["MGETIFPM"], respCustomCommandsDocs["MGETIFPM"]); return registeredCommands; } - private (string, string) CreateTestLibrary() + private (string, string, string) CreateTestLibrary() { var runtimePath = RuntimeEnvironment.GetRuntimeDirectory(); var binPath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); @@ -336,19 +458,23 @@ private string[] RegisterCustomCommands() var cmdInfoPath = Path.Combine(dir1, Path.GetFileName(TestUtils.CustomRespCommandInfoJsonPath)!); File.Copy(TestUtils.CustomRespCommandInfoJsonPath!, cmdInfoPath); - return (cmdInfoPath, Path.Combine(dir1, "testLib1.dll")); + var cmdDocsPath = Path.Combine(dir1, Path.GetFileName(TestUtils.CustomRespCommandDocsJsonPath)!); + File.Copy(TestUtils.CustomRespCommandDocsJsonPath!, cmdDocsPath); + + return (cmdInfoPath, cmdDocsPath, Path.Combine(dir1, "testLib1.dll")); } private string[] DynamicallyRegisterCustomCommands(IDatabase db) { var registeredCommands = new[] { "READWRITETX", "MYDICTGET" }; - var (cmdInfoPath, srcPath) = CreateTestLibrary(); + 
var (cmdInfoPath, cmdDocsPath, srcPath) = CreateTestLibrary(); var args = new List { "TXN", "READWRITETX", 3, "ReadWriteTxn", "READ", "MYDICTGET", 1, "MyDictFactory", "INFO", cmdInfoPath, + "DOCS", cmdDocsPath, "SRC", srcPath }; @@ -362,21 +488,49 @@ private string[] DynamicallyRegisterCustomCommands(IDatabase db) private void VerifyCommandInfo(string cmdName, RedisResult result) { - RespCommandsInfo cmdInfo = default; - if (respCommandsInfo.ContainsKey(cmdName)) - { - cmdInfo = respCommandsInfo[cmdName]; - } - else if (respCustomCommandsInfo.ContainsKey(cmdName)) - { - cmdInfo = respCustomCommandsInfo[cmdName]; - } - else Assert.Fail(); + if (!respCommandsInfo.TryGetValue(cmdName, out var cmdInfo) && + !respSubCommandsInfo.TryGetValue(cmdName, out cmdInfo) && + !respCustomCommandsInfo.TryGetValue(cmdName, out cmdInfo)) + Assert.Fail(); ClassicAssert.IsNotNull(result); ClassicAssert.AreEqual(10, result.Length); ClassicAssert.AreEqual(cmdInfo.Name, (string)result[0]); ClassicAssert.AreEqual(cmdInfo.Arity, (int)result[1]); } + + private void VerifyCommandDocs(string cmdName, RedisResult result) + { + ClassicAssert.IsNotNull(result); + + if (!respCommandsDocs.TryGetValue(cmdName, out var cmdDoc) && + !respSubCommandsDocs.TryGetValue(cmdName, out cmdDoc) && + !respCustomCommandsDocs.TryGetValue(cmdName, out cmdDoc)) + Assert.Fail(); + + for (var i = 0; i < result.Length; i += 2) + { + var key = result[i].ToString(); + var value = result[i + 1]; + + switch (key) + { + case "summary": + ClassicAssert.AreEqual(cmdDoc.Summary, value.ToString()); + break; + case "group": + if (cmdDoc.Group == RespCommandGroup.None) continue; + ClassicAssert.IsTrue(EnumUtils.TryParseEnumFromDescription(value.ToString(), out RespCommandGroup group)); + ClassicAssert.AreEqual(cmdDoc.Group, group); + break; + case "arguments": + ClassicAssert.AreEqual(cmdDoc.Arguments.Length, value.Length); + break; + case "subcommands": + ClassicAssert.AreEqual(cmdDoc.SubCommands.Length, value.Length / 2); + 
break; + } + } + } } } \ No newline at end of file diff --git a/test/Garnet.test/RespHashTests.cs b/test/Garnet.test/RespHashTests.cs index 5991ae0b4a..91289dae2e 100644 --- a/test/Garnet.test/RespHashTests.cs +++ b/test/Garnet.test/RespHashTests.cs @@ -1074,7 +1074,7 @@ public async Task CanFailWhenUseMultiWatchTest() await Task.Run(() => UpdateHashMap(key)); res = lightClientRequest.SendCommand("EXEC"); - expectedResponse = "$-1"; + expectedResponse = "*-1"; ClassicAssert.AreEqual(res.AsSpan().Slice(0, expectedResponse.Length).ToArray(), expectedResponse); // This sequence should work diff --git a/test/Garnet.test/RespListTests.cs b/test/Garnet.test/RespListTests.cs index 6f99ba2eb1..fbf140c13d 100644 --- a/test/Garnet.test/RespListTests.cs +++ b/test/Garnet.test/RespListTests.cs @@ -1440,5 +1440,156 @@ public void CheckListOperationsOnWrongTypeObjectSE() // LMPOP RIGHT RespTestsUtils.CheckCommandOnWrongTypeObjectSE(() => db.ListRightPop(keys, 3)); } + + #region LPOS + + [Test] + [TestCase("a,c,b,c,d", "a", 0)] + [TestCase("a,c,b,c,adc", "adc", 4)] + [TestCase("a,c,b,c,d", "c", 1)] + [TestCase("av,123,bs,c,d", "e", null)] + public void LPOSWithoutOptions(string items, string find, int? expectedIndex) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "KeyA"; + string[] arguments = [key, .. 
items.Split(",")]; + + db.Execute("RPUSH", arguments); + + var actualIndex = (int?)db.Execute("LPOS", key, find); + + ClassicAssert.AreEqual(expectedIndex, actualIndex); + } + + [Test] + public void LPOSWithInvalidKey() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "KeyA"; + + var result = (int?)db.Execute("LPOS", key, "e"); + + ClassicAssert.IsNull(result); + } + + [Test] + [TestCase("a,c,b,c,d", "c", "1", "rank,1")] + [TestCase("a,c,b,c,d", "c", "3", "RANK,2")] + [TestCase("a,c,b,c,d", "c", "null", "rank,3")] + [TestCase("a,c,b,c,d", "c", "3", "RANK,-1")] + [TestCase("a,c,b,c,d", "c", "1", "rank,-2")] + [TestCase("a,c,b,c,d", "c", "null", "RANK,-3")] + [TestCase("a,c,b,c,d", "a", "null", "rank,2")] + [TestCase("a,c,b,c,d", "b", "2", "count,2")] + [TestCase("a,c,b,c,d", "c", "1", "count,1")] + [TestCase("a,c,b,c,d", "c", "1,3", "COUNT,2")] + [TestCase("a,c,b,c,d", "c", "1,3", "count,3")] + [TestCase("a,c,b,c,d", "c", "1,3", "count,0")] + [TestCase("a,c,b,c,d", "c", "1", "maxlen,0")] + [TestCase("a,c,b,c,d", "c", "null", "MAXLEN,1")] + [TestCase("a,c,b,c,d", "c", "1", "maxlen,2")] + [TestCase("a,c,b,c,d", "c", "null", "rank,-1,maxlen,1")] + [TestCase("a,c,b,c,d", "c", "3", "rank,-1,maxlen,2")] + [TestCase("a,c,b,c,d", "c", "null", "rank,-2,maxlen,2")] + [TestCase("a,c,b,c,d", "c", "null", "rank,1,maxlen,1")] + [TestCase("a,c,b,c,d", "c", "1", "rank,1,maxlen,2")] + [TestCase("a,c,b,c,d", "c", "null", "rank,2,maxlen,2")] + [TestCase("a,c,b,c,d", "c", "3,1", "rank,-1,maxlen,0,count,0")] + [TestCase("a,c,b,c,d", "c", "3", "rank,-1,maxlen,0,count,1")] + [TestCase("a,c,b,c,d", "c", "1,3", "rank,1,maxlen,0,count,0")] + [TestCase("a,c,b,c,d", "c", "1", "rank,1,maxlen,0,count,1")] + [TestCase("z,b,z,d,e,a,b,c,d,e,a,b,c,d,e,a,b,c,d,e,a,b,c,z,z", "z", "0,2,23,24", "count,0")] // Test for buffer copy + public void LPOSWithOptions(string items, string find, string expectedIndexs, string options) + { + 
using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "KeyA"; + string[] pushArguments = [key, .. items.Split(",")]; + string[] lopsArguments = [key, find, .. options.Split(",")]; + var expectedIndexInts = expectedIndexs.Split(",").Select(ToNullableInt).ToList(); + + db.Execute("RPUSH", pushArguments); + + if (!options.Contains("count", StringComparison.InvariantCultureIgnoreCase)) + { + var actualIndex = (int?)db.Execute("LPOS", lopsArguments); + + ClassicAssert.AreEqual(expectedIndexInts[0], actualIndex); + } + else + { + var actualIndex = (int[])db.Execute("LPOS", lopsArguments); + + ClassicAssert.AreEqual(expectedIndexInts.Count, actualIndex.Length); + foreach (var index in expectedIndexInts.Zip(actualIndex)) + { + ClassicAssert.AreEqual(index.First, index.Second); + } + } + } + + [Test] + [TestCase("a,c,b,c,d", "c", "1", null, 1, 0)] + [TestCase("a,c,b,c,d", "c", "3", null, -1, 0)] + [TestCase("a,c,b,c,d", "c", "1,3", 2, 1, 0)] + [TestCase("a,c,b,c,d", "c", "3,1", 2, -1, 0)] + [TestCase("a,c,b,c,d", "c", "1", 2, 1, 3)] + public void LPOSWithListPosition(string items, string find, string expectedIndexs, int? count, int rank, int maxLength) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "KeyA"; + string[] pushArguments = [key, .. 
items.Split(",")]; + var expectedIndexInts = expectedIndexs.Split(",").Select(ToNullableInt).ToList(); + + db.Execute("RPUSH", pushArguments); + + if (!count.HasValue) + { + var actualIndex = db.ListPosition(key, find, rank, maxLength); + + ClassicAssert.AreEqual(expectedIndexInts[0], actualIndex); + } + else + { + var actualIndexs = db.ListPositions(key, find, count.Value, rank, maxLength); + + ClassicAssert.AreEqual(expectedIndexInts.Count, actualIndexs.Length); + foreach (var index in expectedIndexInts.Zip(actualIndexs)) + { + ClassicAssert.AreEqual(index.First, index.Second); + } + } + } + + [Test] + [TestCase("a,c,b,c,d", "c", "1", "rank,0")] + [TestCase("a,c,b,c,d", "c", "3", "count,-1")] + [TestCase("a,c,b,c,d", "c", "null", "MAXLEN,-5")] + [TestCase("a,c,b,c,d", "c", "null", "rand,2")] + [TestCase("a,c,b,c,d", "c", "null", "rank,1,count,-1")] + public void LPOSWithInvalidOptions(string items, string find, string expectedIndexs, string options) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "KeyA"; + string[] pushArguments = [key, .. items.Split(",")]; + string[] lopsArguments = [key, find, .. options.Split(",")]; + var expectedIndexInts = expectedIndexs.Split(",").Select(ToNullableInt).ToList(); + db.Execute("RPUSH", pushArguments); + + Assert.Throws(() => db.Execute("LPOS", lopsArguments)); + } + + private int? 
ToNullableInt(string s) + { + int i; + if (int.TryParse(s, out i)) return i; + return null; + } + + #endregion } } \ No newline at end of file diff --git a/test/Garnet.test/RespModuleTests.cs b/test/Garnet.test/RespModuleTests.cs index b5afb02514..c8bf7f46d1 100644 --- a/test/Garnet.test/RespModuleTests.cs +++ b/test/Garnet.test/RespModuleTests.cs @@ -36,7 +36,7 @@ public void TearDown() TestUtils.DeleteDirectory(Directory.GetParent(testModuleDir)?.FullName); } - private string CreateTestModule(string onLoadBody) + private string CreateTestModule(string onLoadBody, string moduleName = "TestModule.dll") { var runtimePath = RuntimeEnvironment.GetRuntimeDirectory(); var binPath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); @@ -65,7 +65,6 @@ private string CreateTestModule(string onLoadBody) using System.Collections.Generic; using Garnet; using Garnet.server; - using Garnet.server.Module; using Tsavorite.core; namespace TestGarnetModule { @@ -79,7 +78,7 @@ public override void OnLoad(ModuleLoadContext context, string[] args) }"); } - var modulePath = Path.Combine(dir1, "TestModule.dll"); + var modulePath = Path.Combine(dir1, moduleName); var filesToCompile = new[] { testFilePath, Path.GetFullPath(@"../main/GarnetServer/Extensions/SetIfPM.cs", TestUtils.RootTestsProjectPath), @@ -173,6 +172,55 @@ public void TestModuleLoad() ClassicAssert.AreEqual("6", result.ToString()); } + + [Test] + public void TestModuleLoadUsingGarnetOptions() + { + var onLoad = + @"context.Initialize(""TestModule1"", 1); + + context.RegisterCommand(""TestModule1.SetIfPM"", new SetIfPMCustomCommand(), CommandType.ReadModifyWrite, + new RespCommandsInfo { Name = ""TestModule.SETIFPM"", Arity = 4, FirstKey = 1, LastKey = 1, Step = 1, + Flags = RespCommandFlags.DenyOom | RespCommandFlags.Write, AclCategories = RespAclCategories.String | RespAclCategories.Write });"; + + var onLoad2 = + @"context.Initialize(""TestModule2"", 1); + + context.RegisterProcedure(""TestModule2.SUM"", 
new Sum());"; + + var module1Path = CreateTestModule(onLoad, "TestModule1.dll"); + var module2Path = CreateTestModule(onLoad2, "TestModule2.dll"); + server.Dispose(); + server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, + disablePubSub: true, + loadModulePaths: [module1Path, module2Path]); + server.Start(); + + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + //// Test SETIFPM + string key = "testkey"; + string value = "foovalue1"; + db.StringSet(key, value); + var retValue = db.StringGet(key); + ClassicAssert.AreEqual(value, retValue.ToString()); + + string newValue = "foovalue2"; + var resp = db.Execute("TestModule1.SETIFPM", key, newValue, "foo"); + ClassicAssert.AreEqual("OK", (string)resp); + retValue = db.StringGet(key); + ClassicAssert.AreEqual(newValue, retValue.ToString()); + + // Test SUM command + db.StringSet("key1", "1"); + db.StringSet("key2", "2"); + db.StringSet("key3", "3"); + var result = db.Execute("TestModule2.SUM", "key1", "key2", "key3"); + ClassicAssert.IsNotNull(result); + ClassicAssert.AreEqual("6", result.ToString()); + } + [Test] public void TestModuleLoadCSArgs() { diff --git a/test/Garnet.test/RespSortedSetTests.cs b/test/Garnet.test/RespSortedSetTests.cs index 65e4a8bcf0..4f549a0467 100644 --- a/test/Garnet.test/RespSortedSetTests.cs +++ b/test/Garnet.test/RespSortedSetTests.cs @@ -97,7 +97,7 @@ public unsafe void SortedSetPopTest() db.SortedSetAdd("key1", "a", 1); db.SortedSetAdd("key1", "b", 2); - var session = new RespServerSession(new DummyNetworkSender(), server.Provider.StoreWrapper, null, null, null, false); + var session = new RespServerSession(0, new DummyNetworkSender(), server.Provider.StoreWrapper, null, null, false); var api = new TestBasicGarnetApi(session.storageSession, session.storageSession.basicContext, session.storageSession.objectStoreBasicContext); var key = Encoding.ASCII.GetBytes("key1"); fixed (byte* keyPtr = key) @@ -177,6 +177,140 @@ 
public void AddAndLength() ClassicAssert.IsFalse(exists); } + [Test] + public void AddWithOptions() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var key = "SortedSet_Add"; + + var added = db.SortedSetAdd(key, entries); + ClassicAssert.AreEqual(entries.Length, added); + + // XX - Only update elements that already exist. Don't add new elements. + var testEntries = new[] + { + new SortedSetEntry("a", 3), + new SortedSetEntry("b", 4), + new SortedSetEntry("k", 11), + new SortedSetEntry("l", 12), + }; + + added = db.SortedSetAdd(key, testEntries, SortedSetWhen.Exists); + ClassicAssert.AreEqual(0, added); + var scores = db.SortedSetScores(key, [new RedisValue("a"), new RedisValue("b")]); + CollectionAssert.AreEqual(new double[] { 3, 4 }, scores); + var count = db.SortedSetLength(key); + ClassicAssert.AreEqual(10, count); + + // NX - Only add new elements. Don't update already existing elements. + testEntries = + [ + new SortedSetEntry("a", 4), + new SortedSetEntry("b", 5), + new SortedSetEntry("k", 11), + new SortedSetEntry("l", 12), + ]; + + added = db.SortedSetAdd(key, testEntries, SortedSetWhen.NotExists); + ClassicAssert.AreEqual(2, added); + scores = db.SortedSetScores(key, [new RedisValue("a"), new RedisValue("b"), new RedisValue("k"), new RedisValue("l")]); + CollectionAssert.AreEqual(new double[] { 3, 4, 11, 12 }, scores); + count = db.SortedSetLength(key); + ClassicAssert.AreEqual(12, count); + + // LT - Only update existing elements if the new score is less than the current score. 
+ testEntries = + [ + new SortedSetEntry("a", 4), + new SortedSetEntry("b", 3), + new SortedSetEntry("m", 13), + ]; + + added = db.SortedSetAdd(key, testEntries, SortedSetWhen.LessThan); + ClassicAssert.AreEqual(1, added); + scores = db.SortedSetScores(key, [new RedisValue("a"), new RedisValue("b"), new RedisValue("m")]); + CollectionAssert.AreEqual(new double[] { 3, 3, 13 }, scores); + count = db.SortedSetLength(key); + ClassicAssert.AreEqual(13, count); + + // GT - Only update existing elements if the new score is greater than the current score. + testEntries = + [ + new SortedSetEntry("a", 4), + new SortedSetEntry("b", 2), + new SortedSetEntry("n", 14), + ]; + + added = db.SortedSetAdd(key, testEntries, SortedSetWhen.GreaterThan); + ClassicAssert.AreEqual(1, added); + scores = db.SortedSetScores(key, [new RedisValue("a"), new RedisValue("b"), new RedisValue("n")]); + CollectionAssert.AreEqual(new double[] { 4, 3, 14 }, scores); + count = db.SortedSetLength(key); + ClassicAssert.AreEqual(14, count); + + // CH - Modify the return value from the number of new elements added, to the total number of elements changed + var testArgs = new object[] + { + key, "CH", + "1", "a", + "2", "b", + "3", "c", + "15", "o" + }; + + var resp = db.Execute("ZADD", testArgs); + ClassicAssert.IsTrue(int.TryParse(resp.ToString(), out var changed)); + ClassicAssert.AreEqual(3, changed); + + // INCR - When this option is specified ZADD acts like ZINCRBY + testArgs = [key, "INCR", "3.5", "a"]; + + resp = db.Execute("ZADD", testArgs); + ClassicAssert.IsTrue(double.TryParse(resp.ToString(), out var newVal)); + ClassicAssert.AreEqual(4.5, newVal); + } + + [Test] + public void AddWithOptionsErrorConditions() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var key = "SortedSet_Add"; + var sampleEntries = new[] { "1", "m1", "2", "m2" }; + + // XX & NX options are mutually exclusive + var args = new[] { key, "XX", "NX" 
}.Union(sampleEntries).ToArray(); + var ex = Assert.Throws(() => db.Execute("ZADD", args)); + ClassicAssert.AreEqual(Encoding.ASCII.GetString(CmdStrings.RESP_ERR_XX_NX_NOT_COMPATIBLE), ex.Message); + + // GT, LT & NX options are mutually exclusive + var argCombinations = new[] + { + new[] { key, "GT", "LT" }, + [key, "GT", "NX"], + [key, "LT", "NX"], + }; + + foreach (var argCombination in argCombinations) + { + args = argCombination.Union(sampleEntries).ToArray(); + ex = Assert.Throws(() => db.Execute("ZADD", args)); + ClassicAssert.AreEqual(Encoding.ASCII.GetString(CmdStrings.RESP_ERR_GT_LT_NX_NOT_COMPATIBLE), ex.Message); + } + + // INCR option supports only one score-element pair + args = new[] { key, "INCR" }.Union(sampleEntries).ToArray(); + ex = Assert.Throws(() => db.Execute("ZADD", args)); + ClassicAssert.AreEqual(Encoding.ASCII.GetString(CmdStrings.RESP_ERR_INCR_SUPPORTS_ONLY_SINGLE_PAIR), ex.Message); + + // No member-score pairs + args = [key, "XX", "CH"]; + ex = Assert.Throws(() => db.Execute("ZADD", args)); + ClassicAssert.AreEqual(Encoding.ASCII.GetString(CmdStrings.RESP_SYNTAX_ERROR), ex.Message); + } [Test] public void CanCreateLeaderBoard() @@ -217,8 +351,6 @@ public void CanGetScoresZCount() ClassicAssert.IsTrue(10 == card); } - - [Test] public void AddRemove() { @@ -2141,7 +2273,7 @@ public async Task CanFailWhenUseMultiWatchTest() await Task.Run(() => UpdateSortedSetKey(key)); res = lightClientRequest.SendCommand("EXEC"); - expectedResponse = "$-1"; + expectedResponse = "*-1"; ClassicAssert.AreEqual(res.AsSpan().Slice(0, expectedResponse.Length).ToArray(), expectedResponse); // This sequence should work diff --git a/test/Garnet.test/RespTests.cs b/test/Garnet.test/RespTests.cs index ea1ba7b056..dd011797e4 100644 --- a/test/Garnet.test/RespTests.cs +++ b/test/Garnet.test/RespTests.cs @@ -3,10 +3,13 @@ using System; using System.Collections.Generic; +using System.IO; using System.Linq; +using System.Reflection; using System.Text; using 
System.Threading; using System.Threading.Tasks; +using Garnet.client; using Garnet.common; using Garnet.server; using NUnit.Framework; @@ -26,7 +29,7 @@ public void Setup() { r = new Random(674386); TestUtils.DeleteDirectory(TestUtils.MethodTestDir, wait: true); - server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, disablePubSub: true); + server = TestUtils.CreateGarnetServer(TestUtils.MethodTestDir, disablePubSub: false); server.Start(); } @@ -124,7 +127,7 @@ public void IsClusterSubCommand() ClassicAssert.True(RespCommandsInfo.TryGetRespCommandInfo("CLUSTER", out var clusterCommand), "Couldn't load CLUSTER command details"); ClassicAssert.IsNotNull(clusterCommand.SubCommands, "CLUSTER didn't have any subcommands"); - IEnumerable clusterSubCommands = clusterCommand.SubCommands.Select(static s => s.SubCommand.Value); + IEnumerable clusterSubCommands = clusterCommand.SubCommands.Select(static s => s.Command); foreach (var cmd in Enum.GetValues()) { var expectedRes = clusterSubCommands.Contains(cmd); @@ -1141,6 +1144,173 @@ public void MultipleExistsKeysAndObjects() ClassicAssert.AreEqual(3, exists); } + #region Expiretime + + [Test] + public void ExpiretimeWithStingValue() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + string key = "key1"; + var expireTimeSpan = TimeSpan.FromMinutes(1); + db.StringSet(key, "test1", expireTimeSpan); + + var actualExpireTime = (long)db.Execute("EXPIRETIME", key); + + ClassicAssert.GreaterOrEqual(actualExpireTime, DateTimeOffset.UtcNow.ToUnixTimeSeconds()); + var expireExpireTime = DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds(); + ClassicAssert.LessOrEqual(actualExpireTime, expireExpireTime); + } + + [Test] + public void ExpiretimeWithUnknownKey() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var expireTime = (long)db.Execute("EXPIRETIME", "keyZ"); + + 
ClassicAssert.AreEqual(-2, expireTime); + } + + [Test] + public void ExpiretimeWithNoKeyExpiration() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + string key = "key1"; + db.StringSet(key, "test1"); + + var expireTime = (long)db.Execute("EXPIRETIME", key); + + ClassicAssert.AreEqual(-1, expireTime); + } + + [Test] + public void ExpiretimeWithInvalidNumberOfArgs() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var exception = Assert.Throws(() => db.Execute("EXPIRETIME")); + Assert.That(exception.Message, Does.StartWith("ERR wrong number of arguments")); + } + + [Test] + public void ExpiretimeWithObjectValue() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key1"; + var expireTimeSpan = TimeSpan.FromMinutes(1); + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var count = db.ListRightPush(key, origList); + var expirySet = db.KeyExpire(key, expireTimeSpan); + + var actualExpireTime = (long)db.Execute("EXPIRETIME", key); + + ClassicAssert.GreaterOrEqual(actualExpireTime, DateTimeOffset.UtcNow.ToUnixTimeSeconds()); + var expireExpireTime = DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds(); + ClassicAssert.LessOrEqual(actualExpireTime, expireExpireTime); + } + + [Test] + public void ExpiretimeWithNoKeyExpirationForObjectValue() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key1"; + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var count = db.ListRightPush(key, origList); + + var expireTime = (long)db.Execute("EXPIRETIME", key); + + ClassicAssert.AreEqual(-1, expireTime); + } + + [Test] + public void PExpiretimeWithStingValue() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + string key = 
"key1"; + var expireTimeSpan = TimeSpan.FromMinutes(1); + db.StringSet(key, "test1", expireTimeSpan); + + var actualExpireTime = (long)db.Execute("PEXPIRETIME", key); + + ClassicAssert.GreaterOrEqual(actualExpireTime, DateTimeOffset.UtcNow.ToUnixTimeMilliseconds()); + var expireExpireTime = DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + ClassicAssert.LessOrEqual(actualExpireTime, expireExpireTime); + } + + [Test] + public void PExpiretimeWithUnknownKey() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var expireTime = (long)db.Execute("PEXPIRETIME", "keyZ"); + + ClassicAssert.AreEqual(-2, expireTime); + } + + [Test] + public void PExpiretimeWithNoKeyExpiration() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + string key = "key1"; + db.StringSet(key, "test1"); + + var expireTime = (long)db.Execute("PEXPIRETIME", key); + + ClassicAssert.AreEqual(-1, expireTime); + } + + [Test] + public void PExpiretimeWithInvalidNumberOfArgs() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var exception = Assert.Throws(() => db.Execute("PEXPIRETIME")); + Assert.That(exception.Message, Does.StartWith("ERR wrong number of arguments")); + } + + [Test] + public void PExpiretimeWithObjectValue() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key1"; + var expireTimeSpan = TimeSpan.FromMinutes(1); + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var count = db.ListRightPush(key, origList); + var expirySet = db.KeyExpire(key, expireTimeSpan); + + var actualExpireTime = (long)db.Execute("PEXPIRETIME", key); + + ClassicAssert.GreaterOrEqual(actualExpireTime, DateTimeOffset.UtcNow.ToUnixTimeMilliseconds()); + var expireExpireTime = 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + ClassicAssert.LessOrEqual(actualExpireTime, expireExpireTime); + } + + [Test] + public void PExpiretimeWithNoKeyExpirationForObjectValue() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key1"; + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var count = db.ListRightPush(key, origList); + + var expireTime = (long)db.Execute("PEXPIRETIME", key); + + ClassicAssert.AreEqual(-1, expireTime); + } + + #endregion [Test] public void SingleRename() @@ -1160,6 +1330,26 @@ public void SingleRename() ClassicAssert.AreEqual(null, origValue); } + [Test] + public void SingleRenameWithExpiry() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var origValue = "test1"; + db.StringSet("key1", origValue, TimeSpan.FromMinutes(1)); + + db.KeyRename("key1", "key2"); + string retValue = db.StringGet("key2"); + + ClassicAssert.AreEqual(origValue, retValue); + + var ttl = db.KeyTimeToLive("key2"); + ClassicAssert.IsTrue(ttl.HasValue); + ClassicAssert.Greater(ttl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(ttl.Value.TotalMilliseconds, TimeSpan.FromMinutes(1).TotalMilliseconds); + } + [Test] public void SingleRenameKeyEdgeCase([Values] bool withoutObjectStore) { @@ -1217,143 +1407,414 @@ public void SingleRenameObjectStore() } [Test] - public void CanSelectCommand() + public void SingleRenameObjectStoreWithExpiry() { using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); var db = redis.GetDatabase(0); - var reply = db.Execute("SELECT", "0"); - ClassicAssert.IsTrue(reply.ToString() == "OK"); - Assert.Throws(() => db.Execute("SELECT", "1")); - //select again the def db - db.Execute("SELECT", "0"); - } + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var key1 = "lkey1"; + var count = db.ListRightPush(key1, origList); + 
ClassicAssert.AreEqual(4, count); - [Test] - public void CanSelectCommandLC() - { - using var lightClientRequest = TestUtils.CreateRequest(countResponseType: CountResponseType.Bytes); + var result = db.ListRange(key1); + ClassicAssert.AreEqual(origList, result); - var expectedResponse = "-ERR invalid database index.\r\n+PONG\r\n"; - var response = lightClientRequest.Execute("SELECT 1", "PING", expectedResponse.Length); - ClassicAssert.AreEqual(expectedResponse, response); + var expirySet = db.KeyExpire("lkey1", TimeSpan.FromMinutes(1)); + ClassicAssert.IsTrue(expirySet); + + var key2 = "lkey2"; + var rb = db.KeyRename(key1, key2); + ClassicAssert.IsTrue(rb); + result = db.ListRange(key1); + ClassicAssert.AreEqual(Array.Empty(), result); + + result = db.ListRange(key2); + ClassicAssert.AreEqual(origList, result); + + var ttl = db.KeyTimeToLive("lkey2"); + ClassicAssert.IsTrue(ttl.HasValue); + ClassicAssert.Greater(ttl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(ttl.Value.TotalMilliseconds, TimeSpan.FromMinutes(1).TotalMilliseconds); } [Test] - [TestCase(10)] - [TestCase(50)] - [TestCase(100)] - public void CanDoCommandsInChunks(int bytesSent) + public void SingleRenameWithOldKeyAndNewKeyAsSame() { - // SETEX - using var lightClientRequest = TestUtils.CreateRequest(countResponseType: CountResponseType.Bytes); + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var origValue = "test1"; + var key = "key1"; + db.StringSet(key, origValue); - var expectedResponse = "+OK\r\n"; - var response = lightClientRequest.Execute("SETEX mykey 1 abcdefghij", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + var result = db.KeyRename(key, key); - // GET - expectedResponse = "$10\r\nabcdefghij\r\n"; - response = lightClientRequest.Execute("GET mykey", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + ClassicAssert.IsTrue(result); + 
string retValue = db.StringGet(key); + ClassicAssert.AreEqual(origValue, retValue); + } - Thread.Sleep(2000); + #region RENAMENX - // GET - expectedResponse = "$-1\r\n"; - response = lightClientRequest.Execute("GET mykey", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + [Test] + public void SingleRenameNx() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); - // DECR - expectedResponse = "+OK\r\n"; - response = lightClientRequest.Execute("SET mykeydecr 1", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + string origValue = "test1"; + db.StringSet("key1", origValue); - expectedResponse = ":0\r\n"; - response = lightClientRequest.Execute("DECR mykeydecr", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + var result = db.KeyRename("key1", "key2", When.NotExists); + ClassicAssert.IsTrue(result); - expectedResponse = "$1\r\n0\r\n"; - response = lightClientRequest.Execute("GET mykeydecr", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + string retValue = db.StringGet("key2"); + ClassicAssert.AreEqual(origValue, retValue); - // DEL - expectedResponse = ":1\r\n"; - response = lightClientRequest.Execute("DEL mykeydecr", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + origValue = db.StringGet("key1"); + ClassicAssert.AreEqual(null, origValue); + } - expectedResponse = "$-1\r\n"; - response = lightClientRequest.Execute("GET mykeydecr", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + [Test] + public void SingleRenameNxWithNewKeyAlreadyExist() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); - // EXISTS - expectedResponse = ":0\r\n"; - response = lightClientRequest.Execute("EXISTS mykeydecr", 
expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + string origValue = "test1"; + string origValue2 = "test2"; + db.StringSet("key1", origValue); + db.StringSet("key2", origValue2); - // SET - expectedResponse = "+OK\r\n"; - response = lightClientRequest.Execute("SET mykey 1", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + var result = db.KeyRename("key1", "key2", When.NotExists); + ClassicAssert.IsFalse(result); - // RENAME - expectedResponse = "+OK\r\n"; - response = lightClientRequest.Execute("RENAME mykey mynewkey", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + string retValue2 = db.StringGet("key2"); + ClassicAssert.AreEqual(origValue2, retValue2); - // GET - expectedResponse = "$1\r\n1\r\n"; - response = lightClientRequest.Execute("GET mynewkey", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + string retValue1 = db.StringGet("key1"); + ClassicAssert.AreEqual(origValue, retValue1); } - [Test] - [TestCase(10)] - [TestCase(50)] - [TestCase(100)] - public void CanSetGetCommandsChunks(int bytesSent) + public void SingleRenameNxWithExpiry() { - using var lightClientRequest = TestUtils.CreateRequest(countResponseType: CountResponseType.Bytes); - var sb = new StringBuilder(); - - for (int i = 1; i <= 100; i++) - { - sb.Append($" mykey-{i} {i * 10}"); - } + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); - // MSET - var expectedResponse = "+OK\r\n"; - var response = lightClientRequest.Execute($"MSET{sb}", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + var origValue = "test1"; + db.StringSet("key1", origValue, TimeSpan.FromMinutes(1)); - expectedResponse = ":100\r\n"; - response = lightClientRequest.Execute($"DBSIZE", expectedResponse.Length, bytesSent); - 
ClassicAssert.AreEqual(expectedResponse, response); + var result = db.KeyRename("key1", "key2", When.NotExists); + ClassicAssert.IsTrue(result); - sb.Clear(); - for (int i = 1; i <= 100; i++) - { - sb.Append($" mykey-{i}"); - } + string retValue = db.StringGet("key2"); + ClassicAssert.AreEqual(origValue, retValue); - // MGET - expectedResponse = "*100\r\n$2\r\n10\r\n$2\r\n20\r\n$2\r\n30\r\n$2\r\n40\r\n$2\r\n50\r\n$2\r\n60\r\n$2\r\n70\r\n$2\r\n80\r\n$2\r\n90\r\n$3\r\n100\r\n$3\r\n110\r\n$3\r\n120\r\n$3\r\n130\r\n$3\r\n140\r\n$3\r\n150\r\n$3\r\n160\r\n$3\r\n170\r\n$3\r\n180\r\n$3\r\n190\r\n$3\r\n200\r\n$3\r\n210\r\n$3\r\n220\r\n$3\r\n230\r\n$3\r\n240\r\n$3\r\n250\r\n$3\r\n260\r\n$3\r\n270\r\n$3\r\n280\r\n$3\r\n290\r\n$3\r\n300\r\n$3\r\n310\r\n$3\r\n320\r\n$3\r\n330\r\n$3\r\n340\r\n$3\r\n350\r\n$3\r\n360\r\n$3\r\n370\r\n$3\r\n380\r\n$3\r\n390\r\n$3\r\n400\r\n$3\r\n410\r\n$3\r\n420\r\n$3\r\n430\r\n$3\r\n440\r\n$3\r\n450\r\n$3\r\n460\r\n$3\r\n470\r\n$3\r\n480\r\n$3\r\n490\r\n$3\r\n500\r\n$3\r\n510\r\n$3\r\n520\r\n$3\r\n530\r\n$3\r\n540\r\n$3\r\n550\r\n$3\r\n560\r\n$3\r\n570\r\n$3\r\n580\r\n$3\r\n590\r\n$3\r\n600\r\n$3\r\n610\r\n$3\r\n620\r\n$3\r\n630\r\n$3\r\n640\r\n$3\r\n650\r\n$3\r\n660\r\n$3\r\n670\r\n$3\r\n680\r\n$3\r\n690\r\n$3\r\n700\r\n$3\r\n710\r\n$3\r\n720\r\n$3\r\n730\r\n$3\r\n740\r\n$3\r\n750\r\n$3\r\n760\r\n$3\r\n770\r\n$3\r\n780\r\n$3\r\n790\r\n$3\r\n800\r\n$3\r\n810\r\n$3\r\n820\r\n$3\r\n830\r\n$3\r\n840\r\n$3\r\n850\r\n$3\r\n860\r\n$3\r\n870\r\n$3\r\n880\r\n$3\r\n890\r\n$3\r\n900\r\n$3\r\n910\r\n$3\r\n920\r\n$3\r\n930\r\n$3\r\n940\r\n$3\r\n950\r\n$3\r\n960\r\n$3\r\n970\r\n$3\r\n980\r\n$3\r\n990\r\n$4\r\n1000\r\n"; - response = lightClientRequest.Execute($"MGET{sb}", expectedResponse.Length, bytesSent); - ClassicAssert.AreEqual(expectedResponse, response); + var ttl = db.KeyTimeToLive("key2"); + ClassicAssert.IsTrue(ttl.HasValue); + ClassicAssert.Greater(ttl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(ttl.Value.TotalMilliseconds, 
TimeSpan.FromMinutes(1).TotalMilliseconds); } [Test] - public void PersistTTLTest() + public void SingleRenameNxWithExpiryAndNewKeyAlreadyExist() { using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); var db = redis.GetDatabase(0); - var key = "expireKey"; - var val = "expireValue"; - var expire = 2; + var origValue = "test1"; + string origValue2 = "test2"; + db.StringSet("key1", origValue, TimeSpan.FromMinutes(1)); + db.StringSet("key2", origValue2, TimeSpan.FromMinutes(1)); - var ttl = db.Execute("TTL", key); + var result = db.KeyRename("key1", "key2", When.NotExists); + ClassicAssert.IsFalse(result); + + string retValue = db.StringGet("key2"); + ClassicAssert.AreEqual(origValue2, retValue); + + var ttl = db.KeyTimeToLive("key2"); + ClassicAssert.IsTrue(ttl.HasValue); + ClassicAssert.Greater(ttl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(ttl.Value.TotalMilliseconds, TimeSpan.FromMinutes(1).TotalMilliseconds); + + string retValue1 = db.StringGet("key1"); + CollectionAssert.AreEqual(origValue, retValue1); + } + + [Test] + public void SingleRenameNxObjectStore() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var key1 = "lkey1"; + var count = db.ListRightPush(key1, origList); + var result = db.ListRange(key1); + var key2 = "lkey2"; + + var rb = db.KeyRename(key1, key2, When.NotExists); + ClassicAssert.IsTrue(rb); + + result = db.ListRange(key1); + CollectionAssert.AreEqual(Array.Empty(), result); + + result = db.ListRange(key2); + CollectionAssert.AreEqual(origList, result); + } + + [Test] + public void SingleRenameNxObjectStoreWithNewKeyAlreadyExist() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var origList2 = new RedisValue[] { "z", "y", "z" }; + var key1 = "lkey1"; + var key2 = 
"lkey2"; + db.ListRightPush(key1, origList); + db.ListRightPush(key2, origList2); + + var rb = db.KeyRename(key1, key2, When.NotExists); + ClassicAssert.IsFalse(rb); + + var result = db.ListRange(key1); + ClassicAssert.AreEqual(origList, result); + + result = db.ListRange(key2); + ClassicAssert.AreEqual(origList2, result); + } + + [Test] + public void SingleRenameNxObjectStoreWithExpiry() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var key1 = "lkey1"; + var count = db.ListRightPush(key1, origList); + var result = db.ListRange(key1); + var expirySet = db.KeyExpire("lkey1", TimeSpan.FromMinutes(1)); + var key2 = "lkey2"; + + var rb = db.KeyRename(key1, key2, When.NotExists); + ClassicAssert.IsTrue(rb); + + result = db.ListRange(key1); + ClassicAssert.AreEqual(Array.Empty(), result); + + result = db.ListRange(key2); + ClassicAssert.AreEqual(origList, result); + + var ttl = db.KeyTimeToLive(key2); + ClassicAssert.IsTrue(ttl.HasValue); + ClassicAssert.Greater(ttl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(ttl.Value.TotalMilliseconds, TimeSpan.FromMinutes(1).TotalMilliseconds); + } + + [Test] + public void SingleRenameNxObjectStoreWithExpiryAndNewKeyAlreadyExist() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var origList = new RedisValue[] { "a", "b", "c", "d" }; + var origList2 = new RedisValue[] { "x", "y", "z" }; + var key1 = "lkey1"; + var key2 = "lkey2"; + db.ListRightPush(key1, origList); + db.ListRightPush(key2, origList2); + var result = db.ListRange(key1); + var expirySet = db.KeyExpire(key1, TimeSpan.FromMinutes(1)); + + var rb = db.KeyRename(key1, key2, When.NotExists); + ClassicAssert.IsFalse(rb); + + result = db.ListRange(key1); + ClassicAssert.AreEqual(origList, result); + + result = db.ListRange(key2); + ClassicAssert.AreEqual(origList2, result); 
+ + var ttl = db.KeyTimeToLive(key1); + ClassicAssert.IsTrue(ttl.HasValue); + ClassicAssert.Greater(ttl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(ttl.Value.TotalMilliseconds, TimeSpan.FromMinutes(1).TotalMilliseconds); + + var ttl2 = db.KeyTimeToLive(key2); + ClassicAssert.IsFalse(ttl2.HasValue); + } + + [Test] + public void SingleRenameNxWithKeyNotExist() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var exception = Assert.Throws<RedisServerException>(() => db.KeyRename("key1", "key2", When.NotExists)); + ClassicAssert.AreEqual("ERR no such key", exception.Message); + } + + [Test] + public void SingleRenameNxWithOldKeyAndNewKeyAsSame() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var origValue = "test1"; + var key = "key1"; + db.StringSet(key, origValue); + + var result = db.KeyRename(key, key, When.NotExists); + + ClassicAssert.IsTrue(result); + string retValue = db.StringGet(key); + ClassicAssert.AreEqual(origValue, retValue); + } + + #endregion + + [Test] + public void CanSelectCommand() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var reply = db.Execute("SELECT", "0"); + ClassicAssert.IsTrue(reply.ToString() == "OK"); + Assert.Throws<RedisServerException>(() => db.Execute("SELECT", "1")); + + //select again the def db + db.Execute("SELECT", "0"); + } + + [Test] + public void CanSelectCommandLC() + { + using var lightClientRequest = TestUtils.CreateRequest(countResponseType: CountResponseType.Bytes); + + var expectedResponse = "-ERR invalid database index.\r\n+PONG\r\n"; + var response = lightClientRequest.Execute("SELECT 1", "PING", expectedResponse.Length); + ClassicAssert.AreEqual(expectedResponse, response); + } + + [Test] + [TestCase(10)] + [TestCase(50)] + [TestCase(100)] + public void CanDoCommandsInChunks(int bytesSent) + { + // SETEX + using var lightClientRequest = 
TestUtils.CreateRequest(countResponseType: CountResponseType.Bytes); + + var expectedResponse = "+OK\r\n"; + var response = lightClientRequest.Execute("SETEX mykey 1 abcdefghij", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + // GET + expectedResponse = "$10\r\nabcdefghij\r\n"; + response = lightClientRequest.Execute("GET mykey", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + Thread.Sleep(2000); + + // GET + expectedResponse = "$-1\r\n"; + response = lightClientRequest.Execute("GET mykey", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + // DECR + expectedResponse = "+OK\r\n"; + response = lightClientRequest.Execute("SET mykeydecr 1", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + expectedResponse = ":0\r\n"; + response = lightClientRequest.Execute("DECR mykeydecr", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + expectedResponse = "$1\r\n0\r\n"; + response = lightClientRequest.Execute("GET mykeydecr", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + // DEL + expectedResponse = ":1\r\n"; + response = lightClientRequest.Execute("DEL mykeydecr", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + expectedResponse = "$-1\r\n"; + response = lightClientRequest.Execute("GET mykeydecr", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + // EXISTS + expectedResponse = ":0\r\n"; + response = lightClientRequest.Execute("EXISTS mykeydecr", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + // SET + expectedResponse = "+OK\r\n"; + response = lightClientRequest.Execute("SET mykey 1", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, 
response); + + // RENAME + expectedResponse = "+OK\r\n"; + response = lightClientRequest.Execute("RENAME mykey mynewkey", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + // GET + expectedResponse = "$1\r\n1\r\n"; + response = lightClientRequest.Execute("GET mynewkey", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + } + + + [Test] + [TestCase(10)] + [TestCase(50)] + [TestCase(100)] + public void CanSetGetCommandsChunks(int bytesSent) + { + using var lightClientRequest = TestUtils.CreateRequest(countResponseType: CountResponseType.Bytes); + var sb = new StringBuilder(); + + for (int i = 1; i <= 100; i++) + { + sb.Append($" mykey-{i} {i * 10}"); + } + + // MSET + var expectedResponse = "+OK\r\n"; + var response = lightClientRequest.Execute($"MSET{sb}", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + expectedResponse = ":100\r\n"; + response = lightClientRequest.Execute($"DBSIZE", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + + sb.Clear(); + for (int i = 1; i <= 100; i++) + { + sb.Append($" mykey-{i}"); + } + + // MGET + expectedResponse = 
"*100\r\n$2\r\n10\r\n$2\r\n20\r\n$2\r\n30\r\n$2\r\n40\r\n$2\r\n50\r\n$2\r\n60\r\n$2\r\n70\r\n$2\r\n80\r\n$2\r\n90\r\n$3\r\n100\r\n$3\r\n110\r\n$3\r\n120\r\n$3\r\n130\r\n$3\r\n140\r\n$3\r\n150\r\n$3\r\n160\r\n$3\r\n170\r\n$3\r\n180\r\n$3\r\n190\r\n$3\r\n200\r\n$3\r\n210\r\n$3\r\n220\r\n$3\r\n230\r\n$3\r\n240\r\n$3\r\n250\r\n$3\r\n260\r\n$3\r\n270\r\n$3\r\n280\r\n$3\r\n290\r\n$3\r\n300\r\n$3\r\n310\r\n$3\r\n320\r\n$3\r\n330\r\n$3\r\n340\r\n$3\r\n350\r\n$3\r\n360\r\n$3\r\n370\r\n$3\r\n380\r\n$3\r\n390\r\n$3\r\n400\r\n$3\r\n410\r\n$3\r\n420\r\n$3\r\n430\r\n$3\r\n440\r\n$3\r\n450\r\n$3\r\n460\r\n$3\r\n470\r\n$3\r\n480\r\n$3\r\n490\r\n$3\r\n500\r\n$3\r\n510\r\n$3\r\n520\r\n$3\r\n530\r\n$3\r\n540\r\n$3\r\n550\r\n$3\r\n560\r\n$3\r\n570\r\n$3\r\n580\r\n$3\r\n590\r\n$3\r\n600\r\n$3\r\n610\r\n$3\r\n620\r\n$3\r\n630\r\n$3\r\n640\r\n$3\r\n650\r\n$3\r\n660\r\n$3\r\n670\r\n$3\r\n680\r\n$3\r\n690\r\n$3\r\n700\r\n$3\r\n710\r\n$3\r\n720\r\n$3\r\n730\r\n$3\r\n740\r\n$3\r\n750\r\n$3\r\n760\r\n$3\r\n770\r\n$3\r\n780\r\n$3\r\n790\r\n$3\r\n800\r\n$3\r\n810\r\n$3\r\n820\r\n$3\r\n830\r\n$3\r\n840\r\n$3\r\n850\r\n$3\r\n860\r\n$3\r\n870\r\n$3\r\n880\r\n$3\r\n890\r\n$3\r\n900\r\n$3\r\n910\r\n$3\r\n920\r\n$3\r\n930\r\n$3\r\n940\r\n$3\r\n950\r\n$3\r\n960\r\n$3\r\n970\r\n$3\r\n980\r\n$3\r\n990\r\n$4\r\n1000\r\n"; + response = lightClientRequest.Execute($"MGET{sb}", expectedResponse.Length, bytesSent); + ClassicAssert.AreEqual(expectedResponse, response); + } + + [Test] + public void PersistTTLTest() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var key = "expireKey"; + var val = "expireValue"; + var expire = 2; + + var ttl = db.Execute("TTL", key); ClassicAssert.AreEqual(-2, (int)ttl); db.StringSet(key, val); @@ -1524,9 +1985,7 @@ public void KeyExpireObjectTest(string command) } [Test] - [TestCase("EXPIRE")] - [TestCase("PEXPIRE")] - public void KeyExpireOptionsTest(string command) + public void 
KeyExpireOptionsTest([Values("EXPIRE", "PEXPIRE")] string command, [Values(false, true)] bool testCaseSensitivity) { using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); var db = redis.GetDatabase(0); @@ -1535,32 +1994,32 @@ public void KeyExpireOptionsTest(string command) object[] args = [key, 1000, ""]; db.StringSet(key, key); - args[2] = "XX";// XX -- Set expiry only when the key has an existing expiry + args[2] = testCaseSensitivity ? "Xx" : "XX";// XX -- Set expiry only when the key has an existing expiry bool resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsFalse(resp);//XX return false no existing expiry - args[2] = "NX";// NX -- Set expiry only when the key has no expiry + args[2] = testCaseSensitivity ? "nX" : "NX";// NX -- Set expiry only when the key has no expiry resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsTrue(resp);// NX return true no existing expiry - args[2] = "NX";// NX -- Set expiry only when the key has no expiry + args[2] = testCaseSensitivity ? "nx" : "NX";// NX -- Set expiry only when the key has no expiry resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsFalse(resp);// NX return false existing expiry args[1] = 50; - args[2] = "XX";// XX -- Set expiry only when the key has an existing expiry + args[2] = testCaseSensitivity ? "xx" : "XX";// XX -- Set expiry only when the key has an existing expiry resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsTrue(resp);// XX return true existing expiry var time = db.KeyTimeToLive(key); ClassicAssert.IsTrue(time.Value.TotalSeconds <= (double)((int)args[1]) && time.Value.TotalSeconds > 0); args[1] = 1; - args[2] = "GT";// GT -- Set expiry only when the new expiry is greater than current one + args[2] = testCaseSensitivity ? 
"Gt" : "GT";// GT -- Set expiry only when the new expiry is greater than current one resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsFalse(resp); // GT return false new expiry < current expiry args[1] = 1000; - args[2] = "GT";// GT -- Set expiry only when the new expiry is greater than current one + args[2] = testCaseSensitivity ? "gT" : "GT";// GT -- Set expiry only when the new expiry is greater than current one resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsTrue(resp); // GT return true new expiry > current expiry time = db.KeyTimeToLive(key); @@ -1571,12 +2030,12 @@ public void KeyExpireOptionsTest(string command) ClassicAssert.IsTrue(time.Value.TotalMilliseconds > 500); args[1] = 2000; - args[2] = "LT";// LT -- Set expiry only when the new expiry is less than current one + args[2] = testCaseSensitivity ? "lt" : "LT";// LT -- Set expiry only when the new expiry is less than current one resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsFalse(resp); // LT return false new expiry > current expiry args[1] = 15; - args[2] = "LT";// LT -- Set expiry only when the new expiry is less than current one + args[2] = testCaseSensitivity ? 
"lT" : "LT";// LT -- Set expiry only when the new expiry is less than current one resp = (bool)db.Execute($"{command}", args); ClassicAssert.IsTrue(resp); // LT return true new expiry < current expiry time = db.KeyTimeToLive(key); @@ -1588,40 +2047,627 @@ public void KeyExpireOptionsTest(string command) } [Test] - [TestCase("EXPIRE")] - [TestCase("PEXPIRE")] - public void KeyExpireBadOptionTests(string command) + [TestCase("EXPIRE")] + [TestCase("PEXPIRE")] + public void KeyExpireBadOptionTests(string command) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + ClassicAssert.IsTrue(db.StringSet("foo", "bar")); + + // Invalid should be rejected + { + var exc = ClassicAssert.Throws<RedisServerException>(() => db.Execute(command, "foo", "100", "Q")); + ClassicAssert.AreEqual("ERR Unsupported option Q", exc.Message); + } + + // None should be rejected + { + var exc = ClassicAssert.Throws<RedisServerException>(() => db.Execute(command, "foo", "100", "None")); + ClassicAssert.AreEqual("ERR Unsupported option None", exc.Message); + } + + // Numeric equivalent should be rejected + { + var exc = ClassicAssert.Throws<RedisServerException>(() => db.Execute(command, "foo", "100", "1")); + ClassicAssert.AreEqual("ERR Unsupported option 1", exc.Message); + } + + // Numeric out of bounds should be rejected + { + var exc = ClassicAssert.Throws<RedisServerException>(() => db.Execute(command, "foo", "100", "128")); + ClassicAssert.AreEqual("ERR Unsupported option 128", exc.Message); + } + } + + #region ExpireAt + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithStringAndObject(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + } + else + { + db.StringSet(key, "valueA"); + } + + var actualResult = (int)db.Execute(command, "key", expireTimeUnix); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT")] + [TestCase("PEXPIREAT")] + public void KeyExpireAtWithUnknownKey(string command) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix); + ClassicAssert.AreEqual(actualResult, 0); + } + + [Test] + [TestCase("EXPIREAT")] + [TestCase("PEXPIREAT")] + public void KeyExpireAtWithoutArgs(string command) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + + Assert.Throws<RedisServerException>(() => db.Execute(command, key)); + } + + [Test] + [TestCase("EXPIREAT")] + [TestCase("PEXPIREAT")] + public void KeyExpireAtWithUnknownArgs(string command) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + Assert.Throws(() => db.Execute(command, key, expireTimeUnix, "YY")); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithNxOptionAndKeyHasExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(1); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(10); + var expireTimeUnix = command == "EXPIREAT" ? DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "Nx"); + ClassicAssert.AreEqual(actualResult, 0); + + // Test if the existing expiry time is still the same + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithNxOptionAndKeyHasNoExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var expireTimeSpan = TimeSpan.FromMinutes(10); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + } + else + { + 
db.StringSet(key, "valueA"); + } + var expireTimeUnix = command == "EXPIREAT" ? DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "nX"); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxOptionAndKeyHasExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(1); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(10); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "Xx"); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxOptionAndKeyHasNoExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var expireTimeSpan = TimeSpan.FromMinutes(10); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + } + else + { + db.StringSet(key, "valueA"); + } + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "xX"); + ClassicAssert.AreEqual(actualResult, 0); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsFalse(actualTtl.HasValue); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithGtOptionAndExistingKeyHasSmallerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(1); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(10); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "gT"); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithGtOptionAndExistingKeyHasLargerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(10); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "Gt"); + ClassicAssert.AreEqual(actualResult, 0); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithGtOptionAndExistingKeyNoExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + } + else + { + db.StringSet(key, "valueA"); + } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "GT"); + ClassicAssert.AreEqual(actualResult, 0); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsFalse(actualTtl.HasValue); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxAndGtOptionAndExistingKeyHasSmallerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(1); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(10); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "xx", "GT"); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxAndGtOptionAndExistingKeyHasLargerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(10); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "gt", "XX"); + ClassicAssert.AreEqual(actualResult, 0); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxAndGtOptionAndExistingKeyNoExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + } + else + { + db.StringSet(key, "valueA"); + } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "Gt", "xX"); + ClassicAssert.AreEqual(actualResult, 0); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsFalse(actualTtl.HasValue); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithLtOptionAndExistingKeyHasSmallerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(1); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(10); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "lT"); + ClassicAssert.AreEqual(actualResult, 0); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithLtOptionAndExistingKeyHasLargerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(10); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "LT"); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithLtOptionAndExistingKeyNoExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + } + else + { + db.StringSet(key, "valueA"); + } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "LT"); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxAndLtOptionAndExistingKeyHasSmallerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(1); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(10); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "LT", "XX"); + ClassicAssert.AreEqual(actualResult, 0); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, existingExpireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxAndLtOptionAndExistingKeyHasLargerExpire(string command, bool isObject) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + var existingExpireTimeSpan = TimeSpan.FromMinutes(10); + if (isObject) + { + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); + db.KeyExpire(key, existingExpireTimeSpan); + } + else + { + db.StringSet(key, "valueA", existingExpireTimeSpan); + } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "xX", "Lt"); + ClassicAssert.AreEqual(actualResult, 1); + + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsTrue(actualTtl.HasValue); + ClassicAssert.Greater(actualTtl.Value.TotalMilliseconds, 0); + ClassicAssert.LessOrEqual(actualTtl.Value.TotalMilliseconds, expireTimeSpan.TotalMilliseconds); + } + + [Test] + [TestCase("EXPIREAT", false)] + [TestCase("EXPIREAT", true)] + [TestCase("PEXPIREAT", false)] + [TestCase("PEXPIREAT", true)] + public void KeyExpireAtWithXxAndLtOptionAndExistingKeyNoExpire(string command, bool isObject) { using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); var db = redis.GetDatabase(0); - - ClassicAssert.IsTrue(db.StringSet("foo", "bar")); - - // Invalid should be rejected + var key = "key"; + if (isObject) { - var exc = ClassicAssert.Throws(() => db.Execute(command, "foo", "100", "Q")); - ClassicAssert.AreEqual("ERR Unsupported option Q", exc.Message); + db.SortedSetAdd(key, [new SortedSetEntry("element", 1.0)]); } - - // None should be rejected + else { - var exc = ClassicAssert.Throws(() => db.Execute(command, "foo", "100", "None")); - ClassicAssert.AreEqual("ERR Unsupported option None", exc.Message); + db.StringSet(key, "valueA"); } + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); - // Numeric equivalent should be rejected - { - var exc = ClassicAssert.Throws(() => db.Execute(command, "foo", "100", "1")); - ClassicAssert.AreEqual("ERR Unsupported option 1", exc.Message); - } + var actualResult = (int)db.Execute(command, key, expireTimeUnix, "XX", "LT"); + ClassicAssert.AreEqual(actualResult, 0); - // Numeric out of bounds should be rejected - { - var exc = ClassicAssert.Throws(() => db.Execute(command, "foo", "100", "128")); - ClassicAssert.AreEqual("ERR Unsupported option 128", exc.Message); - } + var actualTtl = db.KeyTimeToLive(key); + ClassicAssert.IsFalse(actualTtl.HasValue); + } + + [Test] + [TestCase("EXPIREAT", "XX", "NX")] + [TestCase("EXPIREAT", "NX", "GT")] + [TestCase("EXPIREAT", "LT", "NX")] + [TestCase("PEXPIREAT", "XX", "NX")] + [TestCase("PEXPIREAT", "NX", "GT")] + [TestCase("PEXPIREAT", "LT", "NX")] + public void KeyExpireAtWithInvalidOptionCombination(string command, string optionA, string optionB) + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + var key = "key"; + db.StringSet(key, "valueA"); + var expireTimeSpan = TimeSpan.FromMinutes(1); + var expireTimeUnix = command == "EXPIREAT" ? 
DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeSeconds() : DateTimeOffset.UtcNow.Add(expireTimeSpan).ToUnixTimeMilliseconds(); + + Assert.Throws(() => db.Execute(command, key, expireTimeUnix, optionA, optionA)); } + #endregion + [Test] public async Task ReAddExpiredKey() { @@ -2227,5 +3273,440 @@ public void AsyncTest1() response = lightClientRequest.Execute($"GET {firstKey}", expectedResponse.Length); ClassicAssert.AreEqual(expectedResponse, response); } + + [Test] + public void ClientIdTest() + { + long id1; + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var result = db.Execute("CLIENT", "ID"); + + ClassicAssert.IsNotNull(result); + ClassicAssert.AreEqual(ResultType.Integer, result.Resp2Type); + + id1 = (long)result; + + ClassicAssert.IsTrue(id1 > 0, "Client ids must be > 0"); + } + + long id2; + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var result = db.Execute("CLIENT", "ID"); + + ClassicAssert.IsNotNull(result); + ClassicAssert.AreEqual(ResultType.Integer, result.Resp2Type); + + id2 = (long)result; + + ClassicAssert.IsTrue(id2 > 0, "Client ids must be > 0"); + } + + ClassicAssert.AreNotEqual(id1, id2, "CLIENT IDs must be unique"); + ClassicAssert.IsTrue(id2 > id1, "CLIENT IDs should be monotonic"); + + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "ID", "foo")); + ClassicAssert.AreEqual("ERR wrong number of arguments for 'client|id' command", exc.Message); + } + } + + [Test] + public void ClientInfoTest() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + var result = (string)db.Execute("CLIENT", "INFO"); + AssertExpectedClientFields(result); + + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "INFO", "foo")); + 
ClassicAssert.AreEqual("ERR wrong number of arguments for 'client|info' command", exc.Message); + } + + [Test] + public void ClientListTest() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + // List everything + { + var list = (string)db.Execute("CLIENT", "LIST"); + AssertExpectedClientFields(list); + } + + // List by id + { + var id = (long)db.Execute("CLIENT", "ID"); + + var list = (string)db.Execute("CLIENT", "LIST", "ID", id, 123); + AssertExpectedClientFields(list); + } + + // List by type + { + var list = (string)db.Execute("CLIENT", "LIST", "TYPE", "NORMAL"); + AssertExpectedClientFields(list); + } + } + + [Test] + public void ClientListErrorTest() + { + using var redis = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var db = redis.GetDatabase(0); + + // Bad option + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "foo")); + ClassicAssert.AreEqual("ERR syntax error", exc.Message); + } + + // Missing type + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "TYPE")); + ClassicAssert.AreEqual("ERR syntax error", exc.Message); + } + + // Bad type + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "TYPE", "foo")); + ClassicAssert.AreEqual("ERR Unknown client type 'foo'", exc.Message); + } + + // Invalid type + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "TYPE", "Invalid")); + ClassicAssert.AreEqual("ERR Unknown client type 'Invalid'", exc.Message); + } + + // Numeric type + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "TYPE", "1")); + ClassicAssert.AreEqual("ERR Unknown client type '1'", exc.Message); + } + + // Missing id + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "ID")); + ClassicAssert.AreEqual("ERR syntax error", exc.Message); + } + + // Bad id + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "ID", "abc")); + 
ClassicAssert.AreEqual("ERR Invalid client ID", exc.Message); + } + + // Combo - Redis docs sort of imply this is supported, but that is not the case in testing + { + var exc = ClassicAssert.Throws(() => db.Execute("CLIENT", "LIST", "TYPE", "NORMAL", "ID", "1")); + ClassicAssert.AreEqual("ERR syntax error", exc.Message); + } + } + + [Test] + public async Task ClientKillTestAsync() + { + using var mainConnection = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var mainDB = mainConnection.GetDatabase(0); + var mainId = (long)mainDB.Execute("CLIENT", "ID"); + + // Kill old style (by remote endpoint) + { + using var targetConnection = await ConnectAsync(); + + var targetId = await targetConnection.ExecuteForLongResultAsync("CLIENT", ["ID"]); + + var remoteEndpoint = GetFlagForSessionId(targetId, "addr", mainDB); + + // Kill acknowledged + var res = mainDB.Execute("CLIENT", "KILL", remoteEndpoint); + ClassicAssert.AreEqual("OK", (string)res); + + AssertNotConnected(targetConnection); + } + + // Kill by id + { + using var targetConnection = await ConnectAsync(); + + var targetId = await targetConnection.ExecuteForLongResultAsync("CLIENT", ["ID"]); + + // Count of killed connections + var res = mainDB.Execute("CLIENT", "KILL", "ID", targetId); + ClassicAssert.AreEqual(1, (int)res); + + AssertNotConnected(targetConnection); + } + + // Kill by type = NORMAL + { + using var targetConnection = await ConnectAsync(); + + // Count of killed connections + var res = mainDB.Execute("CLIENT", "KILL", "TYPE", "NORMAL"); + + // SE.Redis spins up multiple connections, so we can kill more than 1 (but at least 1) connection + ClassicAssert.IsTrue((int)res >= 1); + + AssertNotConnected(targetConnection); + } + + // Kill by type = PUBSUB + { + using var targetConnection = await ConnectAsync(); + + _ = await targetConnection.ExecuteForStringResultAsync("SUBSCRIBE", ["foo"]); + + // Count of killed connections + var res = mainDB.Execute("CLIENT", "KILL", "TYPE", "PUBSUB"); + + // 
SE.Redis spins up multiple connections, so we can kill more than 1 (but at least 1) connection + ClassicAssert.IsTrue((int)res >= 1); + + AssertNotConnected(targetConnection); + } + + // KILL by addr + { + using var targetConnection = await ConnectAsync(); + + var targetId = await targetConnection.ExecuteForLongResultAsync("CLIENT", ["ID"]); + + var remoteEndpoint = GetFlagForSessionId(targetId, "addr", mainDB); + + // Count of killed connections + var res = mainDB.Execute("CLIENT", "KILL", "ADDR", remoteEndpoint); + ClassicAssert.AreEqual(1, (int)res); + + AssertNotConnected(targetConnection); + } + + // KILL by laddr + { + using var targetConnection = await ConnectAsync(); + + var targetId = await targetConnection.ExecuteForLongResultAsync("CLIENT", ["ID"]); + + var localEndpoint = GetFlagForSessionId(targetId, "laddr", mainDB); + + // Count of killed connections + var res = mainDB.Execute("CLIENT", "KILL", "LADDR", localEndpoint); + + // SE.Redis spins up multiple connections, so we can kill more than 1 (but at least 1) connection + ClassicAssert.IsTrue((int)res >= 1); + + AssertNotConnected(targetConnection); + } + + // KILL by maxage + { + using var targetConnection = await ConnectAsync(); + + var targetId = await targetConnection.ExecuteForLongResultAsync("CLIENT", ["ID"]); + + while (true) + { + var age = GetFlagForSessionId(targetId, "age", mainDB); + if (long.Parse(age) >= 2) + { + break; + } + + await Task.Delay(1_000); + } + + // Count of killed connections + var res = mainDB.Execute("CLIENT", "KILL", "MAXAGE", 1); + ClassicAssert.IsTrue((int)res >= 1); + + AssertNotConnected(targetConnection); + } + + // KILL by multiple + { + using var targetConnection = await ConnectAsync(); + + var targetId = await targetConnection.ExecuteForLongResultAsync("CLIENT", ["ID"]); + + var addr = GetFlagForSessionId(targetId, "addr", mainDB); + + // Count of killed connections + var res = mainDB.Execute("CLIENT", "KILL", "ID", targetId, "MAXAGE", -1, "TYPE", "NORMAL", 
"SKIPME", "YES"); + ClassicAssert.AreEqual(1, (int)res); + + AssertNotConnected(targetConnection); + } + + // KILL without SKIPME + { + using var targetConnection = await ConnectAsync(); + + var targetId = await targetConnection.ExecuteForLongResultAsync("CLIENT", ["ID"]); + + try + { + _ = mainDB.Execute("CLIENT", "KILL", "TYPE", "NORMAL", "SKIPME", "NO"); + } + catch + { + // This will kill the SE.Redis connection, so depending on ordering an exception may be observed + } + + AssertNotConnected(targetConnection); + } + + // Grab a flag=value out of CLIENT LIST + static string GetFlagForSessionId(long sessionId, string flag, IDatabase db) + { + var list = (string)db.Execute("CLIENT", "LIST"); + var line = list.Split("\n").Single(l => l.Contains($"id={sessionId} ")); + + string flagValue = null; + + foreach (var flagPair in line.Split(" ")) + { + if (flagPair.StartsWith($"{flag}=")) + { + ClassicAssert.IsNull(flagValue, $"In {line}, found duplicate {flag}"); + + flagValue = flagPair[$"{flag}=".Length..]; + } + } + + ClassicAssert.NotNull(flagValue, $"In {line}, looking for {flag}"); + + return flagValue; + } + + // Create a GarnetClient that we'll try to kill later + static async Task ConnectAsync() + { + var client = TestUtils.GetGarnetClient(); + await client.ConnectAsync(); + + _ = await client.PingAsync(); + + ClassicAssert.IsTrue(client.IsConnected); + + return client; + } + + // Check that we really killed the connection backing a GarnetClient + static void AssertNotConnected(GarnetClient client) + { + // Force the issue by attempting a command + try + { + client.Ping(static (_, __) => { }); + } + catch { } + + ClassicAssert.IsFalse(client.IsConnected); + } + } + + [Test] + public void ClientKillErrors() + { + using var mainConnection = ConnectionMultiplexer.Connect(TestUtils.GetConfig()); + var mainDB = mainConnection.GetDatabase(0); + + // Errors that match Redis behavior + { + // No args + { + var exc = ClassicAssert.Throws(() => mainDB.Execute("CLIENT", 
"KILL")); + ClassicAssert.AreEqual("ERR wrong number of arguments for 'CLIENT|KILL' command", exc.Message); + } + + // Old style, not a known client + { + var exc = ClassicAssert.Throws(() => mainDB.Execute("CLIENT", "KILL", "foo")); + ClassicAssert.AreEqual("ERR No such client", exc.Message); + } + + // New style, bad filter + { + var exc = ClassicAssert.Throws(() => mainDB.Execute("CLIENT", "KILL", "FOO", "bar")); + ClassicAssert.AreEqual("ERR syntax error", exc.Message); + } + + // New style, ID not number + { + var exc = ClassicAssert.Throws(() => mainDB.Execute("CLIENT", "KILL", "ID", "fizz")); + ClassicAssert.AreEqual("ERR client-id should be greater than 0", exc.Message); + } + + // New style, TYPE invalid + { + var exc = ClassicAssert.Throws(() => mainDB.Execute("CLIENT", "KILL", "TYPE", "buzz")); + ClassicAssert.AreEqual("ERR Unknown client type 'buzz'", exc.Message); + } + + // New style, SKIPME invalid + { + var exc = ClassicAssert.Throws(() => mainDB.Execute("CLIENT", "KILL", "SKIPME", "hello")); + ClassicAssert.AreEqual("ERR syntax error", exc.Message); + } + + // New style, MAXAGE invalid + { + var exc = ClassicAssert.Throws(() => mainDB.Execute("CLIENT", "KILL", "MAXAGE", "world")); + ClassicAssert.AreEqual("ERR syntax error", exc.Message); + } + } + + // Because Redis behavior seems to diverge from its documentation, Garnet fails safe in these cases + { + AssertDuplicateDefinitionError("ID", () => mainDB.Execute("CLIENT", "KILL", "ID", "123", "ID", "456")); + AssertDuplicateDefinitionError("TYPE", () => mainDB.Execute("CLIENT", "KILL", "TYPE", "master", "TYPE", "normal")); + AssertDuplicateDefinitionError("USER", () => mainDB.Execute("CLIENT", "KILL", "USER", "foo", "USER", "bar")); + AssertDuplicateDefinitionError("ADDR", () => mainDB.Execute("CLIENT", "KILL", "ADDR", "123", "ADDR", "456")); + AssertDuplicateDefinitionError("LADDR", () => mainDB.Execute("CLIENT", "KILL", "LADDR", "123", "LADDR", "456")); + AssertDuplicateDefinitionError("SKIPME", 
() => mainDB.Execute("CLIENT", "KILL", "SKIPME", "YES", "SKIPME", "NO")); + AssertDuplicateDefinitionError("MAXAGE", () => mainDB.Execute("CLIENT", "KILL", "MAXAGE", "123", "MAXAGE", "456")); + } + + static void AssertDuplicateDefinitionError(string filter, TestDelegate shouldThrow) + { + var exc = ClassicAssert.Throws(shouldThrow); + ClassicAssert.AreEqual($"ERR Filter '{filter}' defined multiple times", exc.Message); + } + } + + /// + /// Check that list is non-empty, and has the minimum required fields. + /// + private static void AssertExpectedClientFields(string list) + { + var lines = list.Split("\n"); + ClassicAssert.IsTrue(lines.Length >= 1); + + foreach (var line in lines) + { + var flags = line.Split(" "); + AssertField(line, flags, "id"); + AssertField(line, flags, "addr"); + AssertField(line, flags, "laddr"); + AssertField(line, flags, "age"); + AssertField(line, flags, "flags"); + AssertField(line, flags, "resp"); + } + + // Check that a given flag is set + static void AssertField(string line, string[] fields, string name) + => ClassicAssert.AreEqual(1, fields.Count(f => f.StartsWith($"{name}=")), $"In {line}, expected single field {name}"); + } } } \ No newline at end of file diff --git a/test/Garnet.test/TestUtils.cs b/test/Garnet.test/TestUtils.cs index f6191a10bc..139794b9d4 100644 --- a/test/Garnet.test/TestUtils.cs +++ b/test/Garnet.test/TestUtils.cs @@ -9,7 +9,6 @@ using System.Net; using System.Net.NetworkInformation; using System.Net.Security; -using System.Reflection; using System.Security.Cryptography.X509Certificates; using System.Text; using System.Threading; @@ -49,9 +48,12 @@ internal static class TestUtils private static int procId = Process.GetCurrentProcess().Id; internal static string CustomRespCommandInfoJsonPath = "CustomRespCommandsInfo.json"; + internal static string CustomRespCommandDocsJsonPath = "CustomRespCommandsDocs.json"; private static bool CustomCommandsInfoInitialized; + private static bool 
CustomCommandsDocsInitialized; private static IReadOnlyDictionary RespCustomCommandsInfo; + private static IReadOnlyDictionary RespCustomCommandsDocs; internal static string AzureTestContainer { @@ -97,9 +99,25 @@ internal static bool TryGetCustomCommandsInfo(out IReadOnlyDictionary + /// Get command info for custom commands defined in custom commands json file + /// + /// Mapping between command name and command info + /// Logger + /// + internal static bool TryGetCustomCommandsDocs(out IReadOnlyDictionary customCommandsDocs, ILogger logger = null) + { + customCommandsDocs = default; + + if (!CustomCommandsDocsInitialized && !TryInitializeCustomCommandsDocs(logger)) return false; + + customCommandsDocs = RespCustomCommandsDocs; + return true; + } + private static bool TryInitializeCustomCommandsInfo(ILogger logger) { - if (!TryGetRespCommandsInfo(CustomRespCommandInfoJsonPath, logger, out var tmpCustomCommandsInfo)) + if (!TryGetRespCommandData(CustomRespCommandInfoJsonPath, logger, out var tmpCustomCommandsInfo)) return false; RespCustomCommandsInfo = tmpCustomCommandsInfo; @@ -107,20 +125,24 @@ private static bool TryInitializeCustomCommandsInfo(ILogger logger) return true; } - private static bool TryGetRespCommandsInfo(string resourcePath, ILogger logger, out IReadOnlyDictionary commandsInfo) + private static bool TryInitializeCustomCommandsDocs(ILogger logger) { - commandsInfo = default; - - var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.EmbeddedResource, null, Assembly.GetExecutingAssembly()); - var commandsInfoProvider = RespCommandsInfoProviderFactory.GetRespCommandsInfoProvider(); + if (!TryGetRespCommandData(CustomRespCommandDocsJsonPath, logger, out var tmpCustomCommandsDocs)) + return false; - var importSucceeded = commandsInfoProvider.TryImportRespCommandsInfo(resourcePath, - streamProvider, out var tmpCommandsInfo, logger); + RespCustomCommandsDocs = tmpCustomCommandsDocs; + CustomCommandsDocsInitialized = true; + return 
true; + } - if (!importSucceeded) return false; + private static bool TryGetRespCommandData(string resourcePath, ILogger logger, out IReadOnlyDictionary commandData) + where TData : class, IRespCommandData + { + var streamProvider = StreamProviderFactory.GetStreamProvider(FileLocationType.Local); + var commandsInfoProvider = RespCommandsDataProviderFactory.GetRespCommandsDataProvider(); - commandsInfo = tmpCommandsInfo; - return true; + return commandsInfoProvider.TryImportRespCommandsData(resourcePath, + streamProvider, out commandData, logger); } static bool IsAzuriteRunning() @@ -167,7 +189,7 @@ public static GarnetServer CreateGarnetServer( string defaultPassword = null, bool useAcl = false, // NOTE: Temporary until ACL is enforced as default string aclFile = null, - string objectStoreTotalMemorySize = default, + string objectStoreHeapMemorySize = default, string objectStoreIndexSize = "16k", string objectStoreIndexMaxSize = default, string indexSize = "1m", @@ -178,7 +200,8 @@ public static GarnetServer CreateGarnetServer( int indexResizeFrequencySecs = 60, IAuthenticationSettings authenticationSettings = null, bool enableLua = false, - ILogger logger = null) + ILogger logger = null, + IEnumerable loadModulePaths = null) { if (UseAzureStorage) IgnoreIfNotRunningAzureTests(); @@ -254,17 +277,18 @@ public static GarnetServer CreateGarnetServer( EnableScatterGatherGet = getSG, IndexResizeFrequencySecs = indexResizeFrequencySecs, ThreadPoolMinThreads = threadPoolMinThreads, + LoadModuleCS = loadModulePaths }; - if (!string.IsNullOrEmpty(objectStoreTotalMemorySize)) - opts.ObjectStoreTotalMemorySize = objectStoreTotalMemorySize; + if (!string.IsNullOrEmpty(objectStoreHeapMemorySize)) + opts.ObjectStoreHeapMemorySize = objectStoreHeapMemorySize; if (indexMaxSize != default) opts.IndexMaxSize = indexMaxSize; if (objectStoreIndexMaxSize != default) opts.ObjectStoreIndexMaxSize = objectStoreIndexMaxSize; if (lowMemory) { - opts.MemorySize = 
opts.ObjectStoreLogMemorySize = MemorySize == default ? "512" : MemorySize; + opts.MemorySize = opts.ObjectStoreLogMemorySize = MemorySize == default ? "1024" : MemorySize; opts.PageSize = opts.ObjectStorePageSize = PageSize == default ? "512" : PageSize; } @@ -333,7 +357,8 @@ public static GarnetServer[] CreateGarnetCluster( string aclFile = null, X509CertificateCollection certificates = null, ILoggerFactory loggerFactory = null, - AadAuthenticationSettings authenticationSettings = null) + AadAuthenticationSettings authenticationSettings = null, + int metricsSamplingFrequency = 0) { if (UseAzureStorage) IgnoreIfNotRunningAzureTests(); @@ -372,7 +397,8 @@ public static GarnetServer[] CreateGarnetCluster( aclFile: aclFile, certificates: certificates, logger: loggerFactory?.CreateLogger("GarnetServer"), - aadAuthenticationSettings: authenticationSettings); + aadAuthenticationSettings: authenticationSettings, + metricsSamplingFrequency: metricsSamplingFrequency); ClassicAssert.IsNotNull(opts); int iter = 0; @@ -417,6 +443,7 @@ public static GarnetServerOptions GetGarnetServerOptions( string aclFile = null, X509CertificateCollection certificates = null, AadAuthenticationSettings aadAuthenticationSettings = null, + int metricsSamplingFrequency = 0, ILogger logger = null) { if (UseAzureStorage) @@ -468,6 +495,7 @@ public static GarnetServerOptions GetGarnetServerOptions( MemorySize = "1g", GossipDelay = gossipDelay, EnableFastCommit = FastCommit, + MetricsSamplingFrequency = metricsSamplingFrequency, TlsOptions = UseTLS ? new GarnetTlsOptions( certFileName: certFile, certPassword: certPassword, @@ -504,7 +532,7 @@ public static GarnetServerOptions GetGarnetServerOptions( if (lowMemory) { - opts.MemorySize = opts.ObjectStoreLogMemorySize = MemorySize == default ? "512" : MemorySize; + opts.MemorySize = opts.ObjectStoreLogMemorySize = MemorySize == default ? "1024" : MemorySize; opts.PageSize = opts.ObjectStorePageSize = PageSize == default ? 
"512" : PageSize; } @@ -624,7 +652,7 @@ public static GarnetClientSession GetGarnetClientSession(bool useTLS = false, bo RemoteCertificateValidationCallback = ValidateServerCertificate, }; } - return new GarnetClientSession(Address, Port, sslOptions); + return new GarnetClientSession(Address, Port, new(), tlsOptions: sslOptions); } public static LightClientRequest CreateRequest(LightClient.OnResponseDelegateUnsafe onReceive = null, bool useTLS = false, CountResponseType countResponseType = CountResponseType.Tokens) diff --git a/test/Garnet.test/TransactionTests.cs b/test/Garnet.test/TransactionTests.cs index d73580682f..76e91799b1 100644 --- a/test/Garnet.test/TransactionTests.cs +++ b/test/Garnet.test/TransactionTests.cs @@ -202,7 +202,7 @@ public async Task SimpleWatchTest() await Task.Run(() => updateKey("key1", "value1_updated")); res = lightClientRequest.SendCommand("EXEC"); - expectedResponse = "$-1"; + expectedResponse = "*-1"; ClassicAssert.AreEqual(res.AsSpan().Slice(0, expectedResponse.Length).ToArray(), expectedResponse); // This one should Commit @@ -243,7 +243,7 @@ public async Task WatchNonExistentKey() await Task.Run(() => updateKey("key1", "value1")); res = lightClientRequest.SendCommand("EXEC"); - expectedResponse = "$-1"; + expectedResponse = "*-1"; ClassicAssert.AreEqual(res.AsSpan().Slice(0, expectedResponse.Length).ToArray(), expectedResponse); // This one should Commit @@ -289,7 +289,7 @@ public async Task WatchKeyFromDisk() await Task.Run(() => updateKey("key1", "value1_updated")); res = lightClientRequest.SendCommand("EXEC"); - expectedResponse = "$-1"; + expectedResponse = "*-1"; ClassicAssert.AreEqual(res.AsSpan().Slice(0, expectedResponse.Length).ToArray(), expectedResponse); // This one should Commit diff --git a/website/docs/commands/api-compatibility.md b/website/docs/commands/api-compatibility.md index af95155613..17757c002f 100644 --- a/website/docs/commands/api-compatibility.md +++ b/website/docs/commands/api-compatibility.md @@ 
-11,23 +11,29 @@ Note that this list is subject to change as we continue to expand our API comman ### Command Categories 1. [ACL](#acl) 2. [BITMAP](#bitmap) -3. [CLUSTER](#cluster) -4. [CONNECTION](#connection) -5. [FUNCTIONS](#functions) -6. [GENERIC](#generic) -7. [GEO](#geo) -8. [HASH](#hash) -9. [HYPERLOGLOG](#hyperloglog) -10. [KEYS](#keys) -11. [LIST](#list) -12. [PUB/SUB](#pubsub) -13. [SCRIPTING](#scripting) -14. [SERVER](#server) -15. [SET](#set) -16. [SORTED SET](#sorted-set) -17. [STREAM](#stream) -18. [STRING](#string) -10. [TRANSACTIONS](#transactions) +3. [CLIENT](#client) +4. [CLUSTER](#cluster) +5. [COMMAND](#command) +6. [CONNECTION](#connection) +7. [FUNCTIONS](#functions) +8. [GENERIC](#generic) +9. [GEO](#geo) +10. [HASH](#hash) +11. [HYPERLOGLOG](#hyperloglog) +12. [KEYS](#keys) +13. [LATENCY](#latency) +14. [LIST](#list) +15. [MEMORY](#memory) +16. [MODULE](#module) +17. [OBJECT](#object) +18. [PUB/SUB](#pubsub) +19. [SCRIPTING](#scripting) +20. [SERVER](#server) +21. [SET](#set) +22. [SORTED SET](#sorted-set) +23. [STREAM](#stream) +24. [STRING](#string) +25. 
[TRANSACTIONS](#transactions) ### Full Commands List @@ -40,6 +46,7 @@ Note that this list is subject to change as we continue to expand our API comman | | GETUSER | ➖ | | | | [LIST](acl.md#acl-list) | ➕ | | | | [LOAD](acl.md#acl-load) | ➕ | | +| | HELP | ➖ | | | | LOG | ➖ | | | | SAVE | ➖ | | | | [SETUSER](acl.md#acl-setuser) | ➕ | | @@ -53,9 +60,27 @@ Note that this list is subject to change as we continue to expand our API comman | | [BITPOS](analytics.md#bitpos) | ➕ | | | | [GETBIT](analytics.md#getbit) | ➕ | | | | [SETBIT](analytics.md#setbit) | ➕ | | +| **CLIENT** | CACHING | ➖ | | +| | GETNAME | ➖ | | +| | GETREDIR | ➖ | | +| | HELP | ➖ | | +| | [ID](client.md#client-id) | ➕ | | +| | [INFO](client.md#client-info) | ➕ | | +| | [KILL](client.md#client-kill) | ➕ | | +| | [LIST](client.md#client-list) | ➕ | | +| | NO-EVICT | ➖ | | +| | NO-TOUCH | ➖ | | +| | PAUSE | ➖ | | +| | REPLY | ➖ | | +| | SETINFO | ➖ | | +| | SETNAME | ➖ | | +| | TRACKING | ➖ | | +| | TRACKINGINFO | ➖ | | +| | UNBLOCK | ➖ | | +| | UNPAUSE | ➖ | | | **CLUSTER** | [ADDSLOTS](cluster.md#cluster-addslots) | ➕ | | | | [ADDSLOTSRANGE](cluster.md#cluster-addslotsrange) | ➕ | | -| | ASKING | ➕ | | +| | [ASKING](cluster.md#asking) | ➕ | | | | [BUMPEPOCH](cluster.md#cluster-bumpepoch) | ➕ | | | | COUNT-FAILURE-REPORTS | ➖ | | | | [COUNTKEYSINSLOT](cluster.md#cluster-countkeysinslot) | ➕ | | @@ -65,41 +90,57 @@ Note that this list is subject to change as we continue to expand our API comman | | FLUSHSLOTS | ➖ | | | | [FORGET](cluster.md#cluster-forget) | ➕ | | | | [GETKEYINSLOT](cluster.md#cluster-getkeysinslot) | ➕ | | -| | INFO | ➕ | | +| | [INFO](cluster.md#cluster-info) | ➕ | | | | [KEYSLOT](cluster.md#cluster-keyslot) | ➕ | | | | LINKS | ➖ | | | | [MEET](cluster.md#cluster-meet) | ➕ | | | | [MYID](cluster.md#cluster-myid) | ➕ | | +| | MYSHARDID | ➖ | | | | [NODES](cluster.md#cluster-nodes) | ➕ | | -| | READONLY | ➕ | | -| | READWRITE | ➕ | | +| | [READONLY](cluster.md#readonly) | ➕ | | +| | 
[READWRITE](cluster.md#readwrite) | ➕ | | | | [REPLICAS](cluster.md#cluster-replicas) | ➕ | | | | [REPLICATE](cluster.md#cluster-replicate) | ➕ | | -| | RESET | ➕ | | +| | [RESET](cluster.md#reset) | ➕ | | | | SAVECONFIG | ➖ | | | | [SET-CONFIG-EPOCH](cluster.md#cluster-set-config-epoch) | ➕ | | | | [SETSLOT](cluster.md#cluster-setslot) | ➕ | | +| | SHARDS | ➖ | | | | [SLAVES](cluster.md#slaves) | ➕ | | -| | SLOTS | ➕ | | +| | [SLOTS](cluster.md#cluster-slots) | ➕ | (deprecated) | +| **COMMAND** | [COMMAND](server.md#command) | ➕ | | +| | [COUNT](server.md#command-count) | ➕ | | +| | [DOCS](server.md#command-docs) | ➕ | | +| | GETKEYS | ➖ | | +| | GETKEYSANDFLAGS | ➖ | | +| | HELP | ➖ | | +| | [INFO](server.md#command-info) | ➕ | | +| | LIST | ➖ | | | **CONNECTION** | [AUTH](generic-commands.md#auth) | ➕ | | -| | CLIENT CACHING | ➖ | | | | [ECHO](generic-commands.md#echo) | ➕ | | | | [HELLO](generic-commands.md#hello) | ➕ | | | | [PING](generic-commands.md#ping) | ➕ | | | | [QUIT](generic-commands.md#quit) | ➕ | | -| | SELECT | ➕ | | +| | [SELECT](generic-commands.md#select) | ➕ | | | **FUNCTIONS** | FCALL | ➖ | | | | FCALL_RO | ➖ | | -| | FUNCTION | ➖ | -| **GENERIC** | OBJECT | ➖ | | -| | [PERSIST](generic-commands.md#persist) | ➕ | | +| | DELETE | ➖ | +| | DUMP | ➖ | +| | FLUSH | ➖ | +| | HELP | ➖ | +| | KILL | ➖ | +| | LIST | ➖ | +| | LOAD | ➖ | +| | RESTORE | ➖ | +| | STATS | ➖ | +| **GENERIC** | [PERSIST](generic-commands.md#persist) | ➕ | | | | [PEXPIRE](generic-commands.md#pexpire) | ➕ | | -| | PEXPIREAT | ➖ | | -| | PEXPIRETIME | ➖ | | +| | [PEXPIREAT](generic-commands.md#pexpireat) | ➕ | | +| | [PEXPIRETIME](generic-commands.md#pexpiretime) | ➕ | | | | [PTTL](generic-commands.md#pttl) | ➕ | | | | RANDOMKEY | ➖ | | | | [RENAME](generic-commands.md#rename) | ➕ | | -| | RENAMENX | ➖ | | +| | [RENAMENX](generic-commands.md#renamenx) | ➕ | | | | RESTORE | ➖ | | | | [SCAN](generic-commands.md#scan) | ➕ | | | | SORT | ➖ | | @@ -109,6 +150,7 @@ Note that this 
list is subject to change as we continue to expand our API comman | | [TYPE](generic-commands.md#type) | ➕ | | | | [UNLINK](generic-commands.md#unlink) | ➕ | | | | WAIT | ➖ | | +| | WAITAOF | ➖ | | | **GEO** | [GEOADD](data-structures.md#geoadd) | ➕ | | | | [GEODIST](data-structures.md#geodist) | ➕ | | | | [GEOHASH](data-structures.md#geohash) | ➕ | | @@ -116,10 +158,14 @@ Note that this list is subject to change as we continue to expand our API comman | | GEORADIUS | ➖ | | | | GEORADIUS_RO | ➖ | | | | GEORADIUSBYMEMBER | ➖ | | +| | GEORADIUSBYMEMBER_RO | ➖ | | | | [GEOSEARCH](data-structures.md#geosearch) | ➕ | Partially Implemented | | | GEOSEARCHSTORE | ➖ | | | **HASH** | [HDEL](data-structures.md#hdel) | ➕ | | | | [HEXISTS](data-structures.md#hexists) | ➕ | | +| | HEXPIRE | ➖ | | +| | HEXPIREAT | ➖ | | +| | HEXPIRETIME | ➖ | | | | [HGET](data-structures.md#hget) | ➕ | | | | [HGETALL](data-structures.md#hgetall) | ➕ | | | | [HINCRBY](data-structures.md#hincrby) | ➕ | | @@ -128,11 +174,17 @@ Note that this list is subject to change as we continue to expand our API comman | | [HLEN](data-structures.md#hlen) | ➕ | | | | [HMGET](data-structures.md#hmget) | ➕ | | | | [HMSET](data-structures.md#hmset) | ➕ | | +| | HPERSIST | ➖ | | +| | HPEXPIRE | ➖ | | +| | HPEXPIREAT | ➖ | | +| | HPEXPIRETIME | ➖ | | +| | HPTTL | ➖ | | | | [HRANDFIELD](data-structures.md#hrandfield) | ➕ | | -| | [HSCAN](data-structures.md#hscan) | ➕ | | +| | [HSCAN](data-structures.md#hscan) | ➕ | `NOVALUES` flag not yet implemented | | | [HSET](data-structures.md#hset) | ➕ | | | | [HSETNX](data-structures.md#hsetnx) | ➕ | | | | [HSTRLEN](data-structures.md#hstrlen) | ➕ | | +| | HTTL | ➖ | | | | [HVALS](data-structures.md#hvals) | ➕ | | | **HYPERLOGLOG** | [PFADD](analytics.md#pfadd) | ➕ | | | | [PFCOUNT](analytics.md#pfcount) | ➕ | | @@ -140,15 +192,22 @@ Note that this list is subject to change as we continue to expand our API comman | | [PFMERGE](analytics.md#pfmerge) | ➕ | | | | PFSELFTEST | ➖ | 
| | **KEYS** | COPY | ➖ | | -| | [DEL](generic-commands.md#del) | ➕ | | +| | [DEL](generic-commands.md#del) | ➕ | | | | DUMP | ➖ | | | | [EXISTS](generic-commands.md#exists) | ➕ | | | | [EXPIRE](generic-commands.md#expire) | ➕ | | -| | EXPIREAT | ➖ | | -| | EXPIRETIME | ➖ | | +| | [EXPIREAT](generic-commands.md#expireat) | ➕ | | +| | [EXPIRETIME](generic-commands.md#expiretime) | ➕ | | | | [KEYS](generic-commands.md#keys) | ➕ | | | | [MIGRATE](generic-commands.md#migrate) | ➕ | | | | MOVE | ➖ | | +| **LATENCY** | DOCTOR | ➖ | | +| | GRAPH | ➖ | | +| | HELP | ➖ | | +| | [HISTOGRAM](server.md#latency-histogram) | ➕ | | +| | HISTORY | ➖ | | +| | LATEST | ➖ | | +| | [RESET](server.md#latency-reset) | ➕ | | | **LIST** | [BLMOVE](data-structures.md#blmove) | ➕ | | | | BLMPOP | ➖ | | | | [BLPOP](data-structures.md#blpop) | ➕ | | @@ -160,7 +219,7 @@ Note that this list is subject to change as we continue to expand our API comman | | [LMOVE](data-structures.md#lmove) | ➕ | | | | LMPOP | ➖ | | | | [LPOP](data-structures.md#lpop) | ➕ | | -| | LPOS | ➖ | | +| | [LPOS](data-structures.md#lpos) | ➕ | | | | [LPUSH](data-structures.md#lpush) | ➕ | | | | [LPUSHX](data-structures.md#lpushx) | ➕ | | | | [LRANGE](data-structures.md#lrange) | ➕ | | @@ -171,54 +230,59 @@ Note that this list is subject to change as we continue to expand our API comman | | [RPOPLPUSH](data-structures.md#rpoplpush) | ➕ | | | | [RPUSH](data-structures.md#rpush) | ➕ | | | | [RPUSHX](data-structures.md#rpushx) | ➕ | | +| **MEMORY** | DOCTOR | ➖ | | +| | HELP | ➖ | | +| | MALLOC-STATS | ➖ | | +| | PURGE | ➖ | | +| | STATS | ➖ | | +| | [USAGE](server.md#memory-usage) | ➕ | | +| **MODULE** | HELP | ➖ | | +| | LIST | ➖ | | +| | LOAD | ➖ | | +| | LOADEX | ➖ | | +| | UNLOAD | ➖ | | +| **OBJECT** | ENCODING | ➖ | | +| | FREQ | ➖ | | +| | HELP | ➖ | | +| | IDLETIME | ➖ | | +| | REFCOUNT | ➖ | | | **PUB/SUB** | [PSUBSCRIBE](analytics.md#psubscribe) | ➕ | | | | [PUBLISH](analytics.md#publish) | ➕ | | | | PUBSUB 
CHANNELS | ➖ | | +| | PUBSUB HELP | ➖ | | | | PUBSUB NUMPAT | ➖ | | | | PUBSUB NUMSUB | ➖ | | +| | PUBSUB SHARDCHANNELS | ➖ | | +| | PUBSUB SHARDNUMSUB | ➖ | | | | [PUNSUBSCRIBE](analytics.md#punsubscribe) | ➕ | | | | [SUBSCRIBE](analytics.md#subscribe) | ➕ | | | | [UNSUBSCRIBE](analytics.md#unsubscribe) | ➕ | | -| **SCRIPTING** | EVAL | ➕ | | +| **SCRIPTING** | [EVAL](scripting-and-functions.md#eval) | ➕ | | | | EVAL_RO | ➖ | | -| | EVALSHA | ➕ | | +| | [EVALSHA](scripting-and-functions.md#evalsha) | ➕ | | | | EVALSHA_RO | ➖ | | | | SCRIPT DEBUG | ➖ | | -| | SCRIPT EXISTS | ➕ | | -| | SCRIPT FLUSH | ➕ | | +| | [SCRIPT EXISTS](scripting-and-functions.md#script-exists) | ➕ | | +| | [SCRIPT FLUSH](scripting-and-functions.md#script-flush) | ➕ | | +| | SCRIPT HELP | ➖ | | | | SCRIPT KILL | ➖ | | -| | SCRIPT LOAD | ➕ | | +| | [SCRIPT LOAD](scripting-and-functions.md#script-load) | ➕ | | | **SERVER** | ACL | ➖ | | | | BGREWRITEAOF | ➖ | | | | [BGSAVE](checkpoint.md#bgsave) | ➕ | | -| | [COMMAND](server.md#command) | ➕ | | -| | [COMMAND COUNT](server.md#command-count) | ➕ | | -| | COMMAND DOCS | ➖ | | -| | COMMAND GETKEYS | ➖ | | -| | COMMAND GETKEYSANDFLAGS | ➖ | | -| | [COMMAND INFO](server.md#command-info) | ➕ | | -| | COMMAND LIST | ➖ | | | | [COMMITAOF](server.md#commitaof) | ➕ | | | | [CONFIG GET](server.md#config-get) | ➕ | | +| | CONFIG HELP | ➖ | | +| | CONFIG RESETSTAT | ➖ | | +| | CONFIG REWRITE | ➖ | | | | [CONFIG SET](server.md#config-set) | ➕ | | | | [DBSIZE](server.md#dbsize) | ➕ | | +| | DEBUG | ➖ | Internal command | | | [FLUSHALL](server.md#flushall) | ➕ | | | | [FLUSHDB](server.md#flushdb) | ➕ | | | | [LASTSAVE](checkpoint.md#lastsave) | ➕ | | -| | LATENCY DOCTOR | ➖ | | -| | LATENCY GRAPH | ➖ | | -| | [LATENCY HISTOGRAM](server.md#latency-histogram) | ➕ | | -| | LATENCY HISTORY | ➖ | | -| | LATENCY LATEST | ➖ | | -| | [LATENCY RESET](server.md#latency-reset) | ➕ | | -| | LOLWUT | ➕ | | -| | MEMORY DOCTOR | ➖ | | -| | MEMORY MALLOC-STATS | ➖ | | -| | 
MEMORY PURGE | ➖ | | -| | MEMORY STATS | ➖ | | -| | [MEMORY USAGE](server.md#memory-usage) | ➕ | | -| | MODULE | ➖ | | -| | MONITOR | ➖ | | +| | LOLWUT | ➖ | | +| | [MONITOR](server.md#monitor) | ➕ | | | | PSYNC | ➖ | | | | REPLCONF | ➖ | | | | [REPLICAOF](server.md#replicaof) | ➕ | | @@ -227,7 +291,6 @@ Note that this list is subject to change as we continue to expand our API comman | | [SAVE](checkpoint.md#save) | ➕ | | | | SHUTDOWN | ➖ | | | | SLAVEOF | ➖ | | -| | SLOWLOG | ➖ | | | | SWAPDB | ➖ | | | | SYNC | ➖ | | | | [TIME](server.md#time) | ➕ | | @@ -238,17 +301,25 @@ Note that this list is subject to change as we continue to expand our API comman | | [SINTER](data-structures.md#sinter) | ➕ | | | | [SINTERSTORE](data-structures.md#sinterstore) | ➕ | | | | SINTERCARD | ➖ | | -| | SISMEMBER | ➕ | | +| | [SISMEMBER](data-structures.md#sismember) | ➕ | | | | [SMEMBERS](data-structures.md#smembers) | ➕ | | | | SMISMEMBER | ➖ | | | | SMOVE | ➖ | | | | [SPOP](data-structures.md#spop) | ➕ | | +| | SPUBLISH | ➖ | | | | [SRANDMEMBER](data-structures.md#srandmember) | ➕ | | | | [SREM](data-structures.md#srem) | ➕ | | | | [SSCAN](data-structures.md#sscan) | ➕ | | +| | SSUBSCRIBE | ➖ | | | | [SUNION](data-structures.md#sunion) | ➕ | | | | [SUNIONSTORE](data-structures.md#sunionstore) | ➕ | | +| | SUNSUBSCRIBE | ➖ | | +| **SLOWLOG** | GET | ➖ | | +| | HELP | ➖ | | +| | LEN | ➖ | | +| | RESET | ➖ | | | **SORTED SET** | BZPOP | ➖ | | +| | BZMPOP | ➖ | | | | BZPOPMAX | ➖ | | | | BZPOPMIN | ➖ | | | | [ZADD](data-structures.md#zadd) | ➕ | | @@ -277,7 +348,7 @@ Note that this list is subject to change as we continue to expand our API comman | | [ZREMRANGEBYSCORE](data-structures.md#zremrangebyscore) | ➕ | | | | [ZREVRANGE](data-structures.md#zrevrange) | ➕ | | | | ZREVRANGEBYLEX | ➖ | | -| | ZREVRANGEBYSCORE | ➖ | | +| | [ZREVRANGEBYSCORE](data-structures.md#zrevrangebyscore) | ➕ | | | | [ZREVRANK](data-structures.md#zrevrank) | ➕ | | | | [ZSCAN](data-structures.md#zscan) | ➕ | 
| | | [ZSCORE](data-structures.md#zscore) | ➕ | | @@ -288,8 +359,16 @@ Note that this list is subject to change as we continue to expand our API comman | | XAUTOCLAIM | ➖ | | | | XCLAIM | ➖ | | | | XDEL | ➖ | | -| | XGROUP | ➖ | | -| | XINFO | ➖ | | +| | XGROUP CREATE | ➖ | | +| | XGROUP CREATECONSUMER | ➖ | | +| | XGROUP DELCONSUMER | ➖ | | +| | XGROUP DESTROY | ➖ | | +| | XGROUP HELP | ➖ | | +| | XGROUP SETID | ➖ | | +| | XINFO CONSUMERS | ➖ | | +| | XINFO GROUPS | ➖ | | +| | XINFO HELP | ➖ | | +| | XINFO STREAM | ➖ | | | | XLEN | ➖ | | | | XPENDING | ➖ | | | | XRANGE | ➖ | | @@ -298,7 +377,7 @@ Note that this list is subject to change as we continue to expand our API comman | | XREVRANGE | ➖ | | | | XSETID | ➖ | | | | XTRIM | ➖ | | -| **STRING** | APPEND | ➕ | | +| **STRING** | [APPEND](raw-string.md#append) | ➕ | | | | [DECR](raw-string.md#decr) | ➕ | | | | [DECRBY](raw-string.md#decrby) | ➕ | | | | [GET](raw-string.md#get) | ➕ | | @@ -318,7 +397,7 @@ Note that this list is subject to change as we continue to expand our API comman | | [SET ... NX](raw-string.md#set) | ➕ | | | | [SETEX](raw-string.md#setex) | ➕ | | | | SETNX | ➖ | | -| | SETRANGE | ➕ | | +| | [SETRANGE](raw-string.md#setrange) | ➕ | | | | [STRLEN](raw-string.md#strlen) | ➕ | | | | SUBSTR | ➖ | | | **TRANSACTIONS** | [DISCARD](transactions.md#discard) | ➕ | | diff --git a/website/docs/commands/client.md b/website/docs/commands/client.md new file mode 100644 index 0000000000..37ae56621c --- /dev/null +++ b/website/docs/commands/client.md @@ -0,0 +1,93 @@ +--- +id: client-commands +sidebar_label: Client Management +title: Client Management +slug: client +--- + +### CLIENT ID + +#### Syntax + +```bash + CLIENT ID +``` + +The command just returns the ID of the current connection. Every connection ID has certain guarantees: + +1. 
It is never repeated, so if CLIENT ID returns the same number, the caller can be sure that the underlying client did not disconnect and reconnect the connection, but it is still the same connection. +1. The ID is monotonically incremental. If the ID of a connection is greater than the ID of another connection, it is guaranteed that the second connection was established with the server at a later time. + +#### Resp Reply + +Integer reply: the ID of the client. + +--- + +### CLIENT INFO + +#### Syntax + +```bash + CLIENT INFO +``` + +The command returns information and statistics about the current client connection in a mostly human readable format. + +#### Resp Reply + +Bulk string reply: a unique string for the current client, as described at the CLIENT LIST page. + +--- + +### CLIENT KILL + +#### Syntax + +```bash + CLIENT KILL ] | [USER username] | [ADDR ip:port] | + [LADDR ip:port] | [SKIPME ] | [MAXAGE maxage] + [[ID client-id] | [TYPE ] | [USER username] | [ADDR ip:port] | [LADDR ip:port] | + [SKIPME ] | [MAXAGE maxage] ...]>> +``` + +The CLIENT KILL command closes a given client connection. + +* CLIENT KILL addr:port. This kill the client matching the given address and port. +* CLIENT KILL ADDR ip:port. This kill the client matching the given address and port. +* CLIENT KILL LADDR ip:port. Kill all clients connected to specified local (bind) address. +* CLIENT KILL ID client-id. Allows to kill a client by its unique ID field. Client ID's are retrieved using the CLIENT LIST command. +* CLIENT KILL TYPE type, where type is one of normal, master, replica and pubsub. This closes the connections of all the clients in the specified class. Note that clients blocked into the MONITOR command are considered to belong to the normal class. +* CLIENT KILL USER username. Closes all the connections that are authenticated with the specified ACL username, however it returns an error if the username does not map to an existing ACL user. +* CLIENT KILL SKIPME yes/no. 
By default this option is set to yes, that is, the client calling the command will not get killed, however setting this option to no will have the effect of also killing the client calling the command. +* CLIENT KILL MAXAGE maxage. Closes all the connections that are older than the specified age, in seconds. + +#### Resp Reply + +One of the following: + +* Simple string reply: OK when called in 3 argument format and the connection has been closed. +* Integer reply: when called in filter/value format, the number of clients killed. + +--- + +### CLIENT LIST + +#### Syntax + +```bash + CLIENT LIST [TYPE ] + [ID client-id [client-id ...]] +``` + +The CLIENT LIST command returns information and statistics about the client connections server in a mostly human readable format. + +You can use one of the optional subcommands to filter the list. The TYPE type subcommand filters the list by clients' type, where type is one of normal, master, replica, and pubsub. + +#### Resp Reply + +Bulk string reply: information and statistics about client connections. + +--- \ No newline at end of file diff --git a/website/docs/commands/cluster.md b/website/docs/commands/cluster.md index 458317f051..854dd8a58e 100644 --- a/website/docs/commands/cluster.md +++ b/website/docs/commands/cluster.md @@ -248,6 +248,21 @@ An array of keys or empty. ---- +## CLUSTER INFO + +#### Syntax + +```bash + CLUSTER INFO +``` + +CLUSTER INFO provides INFO style information about Redis Cluster vital parameters. + +#### RESP Reply +Bulk string reply: A map between named fields and values in the form of : lines separated by newlines composed by the two bytes CRLF. + +---- + ## CLUSTER KEYSLOT #### Syntax @@ -611,6 +626,77 @@ Array reply: a nested list of hash ranges and node information grouped by shard. ---- +## CLUSTER SLOTS + +#### Syntax + +```bash + CLUSTER SLOTS +``` + +CLUSTER SLOTS returns details about which cluster slots map to which Redis instances. 
+ +#### RESP Reply +Array reply: nested list of slot ranges with networking information. + +---- + +## ASKING + +#### Syntax + +```bash + ASKING +``` + +When a cluster client receives an -ASK redirect, the ASKING command is sent to the target node followed by the command which was redirected. This is normally done automatically by cluster clients. + +#### RESP Reply +Simple string reply: OK. + +---- + +## READONLY + +#### Syntax + +```bash + READONLY +``` + +Enables read queries for a connection to a Redis Cluster replica node. + +#### RESP Reply +Simple string reply: OK. + +---- + +## READWRITE +#### Syntax +```bash + READWRITE +``` +Disables read queries for a connection to a Redis Cluster replica node. + +#### RESP Reply +Simple string reply: OK. + +---- + +## RESET + +#### Syntax + +```bash + RESET +``` + +This command performs a full reset of the connection's server-side context, mimicking the effect of disconnecting and reconnecting again. + +#### RESP Reply +Simple string reply: RESET. + +---- diff --git a/website/docs/commands/data-structures.md b/website/docs/commands/data-structures.md index 922bd70a2c..4f0529e35b 100644 --- a/website/docs/commands/data-structures.md +++ b/website/docs/commands/data-structures.md @@ -319,6 +319,26 @@ By default, the command pops a single element from the beginning of the list. Wh --- +### LPOS + +#### Syntax + +```bash + LPOS key element [RANK rank] [COUNT num-matches] [MAXLEN len] +``` + +The command returns the index of matching elements inside a Redis list. By default, when no options are given, it will scan the list from head to tail, looking for the first match of "element". If the element is found, its index (the zero-based position in the list) is returned. Otherwise, if no match is found, nil is returned. + +#### Resp Reply + +Any of the following: + +* Null reply: if there is no matching element. +* Integer reply: an integer representing the matching element. 
+* Array reply: If the COUNT option is given, an array of integers representing the matching elements (or an empty array if there are no matches). + +--- + ### LPUSH #### Syntax @@ -487,7 +507,6 @@ Returns the set cardinality (number of elements) of the set stored at **key**. Returns all the members of the set value stored at **key**. - --- ### SPOP @@ -933,6 +952,18 @@ Apart from the reversed ordering, **ZREVRANGE** is similar to [ZRANGE](#zrange). --- +### ZREVRANGEBYSCORE + +#### Syntax + +```bash + ZREVRANGEBYSCORE key max min [WITHSCORES] [LIMIT offset count] +``` + +Returns all the elements in the sorted set at key with a score between max and min (including elements with score equal to max or min). + +--- + ### ZREVRANK {#zrevrank} #### Syntax diff --git a/website/docs/commands/generic-commands.md b/website/docs/commands/generic-commands.md index 8eecbfdede..59ebf9b933 100644 --- a/website/docs/commands/generic-commands.md +++ b/website/docs/commands/generic-commands.md @@ -99,6 +99,22 @@ Simple string reply: OK. --- +### SELECT + +#### Syntax + +```bash + SELECT +``` + +Select the Redis logical database having the specified zero-based numeric index. New connections always use the database 0. + +#### Resp Reply + +Simple string reply: OK. + +--- + ## Generic Commands ### DEL @@ -150,16 +166,65 @@ The EXPIRE command supports a set of options: * `GT` -- Set expiry only when the new expiry is greater than current one * `LT` -- Set expiry only when the new expiry is less than current one +The GT, LT and NX options are mutually exclusive. + #### Resp Reply One of the following: * Integer reply: 0 if the timeout was not set; for example, the key doesn't exist, or the operation was skipped because of the provided arguments. +* Integer reply: 1 if the timeout was set. + +--- + +### EXPIREAT +#### Syntax + +```bash + EXPIREAT key seconds [NX | XX | GT | LT] +``` + +Set a timeout on key using absolute Unix timestamp (seconds since January 1, 1970) in seconds. 
After the timestamp, the key will automatically be deleted. + +The EXPIREAT command supports a set of options: + +* `NX` -- Set expiry only when the key has no expiry +* `XX` -- Set expiry only when the key has an existing expiry +* `GT` -- Set expiry only when the new expiry is greater than current one +* `LT` -- Set expiry only when the new expiry is less than current one + +The GT, LT and NX options are mutually exclusive. + +#### Resp Reply + +One of the following: + +* Integer reply: 0 if the timeout was not set; for example, the key doesn't exist, or the operation was skipped because of the provided arguments. * Integer reply: 1 if the timeout was set. --- + +### EXPIRETIME + +#### Syntax + +```bash + EXPIRETIME key +``` + +Returns the absolute Unix timestamp (since January 1, 1970) in seconds at which the given key will expire. + +#### Resp Reply + +One of the following: + +* Integer reply: Expiration Unix timestamp in seconds. +* Integer reply: -1 if the key exists but has no associated expiration time. +* Integer reply: -2 if the key does not exist. + +--- + ### KEYS #### Syntax @@ -226,8 +291,56 @@ One of the following: * Integer reply: 0 if key does not exist or does not have an associated timeout. * Integer reply: 1 if the timeout has been removed. +--- + +### PEXPIRETIME + +#### Syntax + +```bash + PEXPIRETIME key +``` + +Returns the absolute Unix timestamp (since January 1, 1970) in milliseconds at which the given key will expire. + +#### Resp Reply + +One of the following: + +* Integer reply: Expiration Unix timestamp in milliseconds. +* Integer reply: -1 if the key exists but has no associated expiration time. +* Integer reply: -2 if the key does not exist. + +--- + +### PEXPIREAT + +#### Syntax + +```bash + PEXPIREAT key unix-time-milliseconds [NX | XX | GT | LT] +``` + +Set a timeout on key using absolute Unix timestamp (since January 1, 1970) in milliseconds. After the timestamp, the key will automatically be deleted. 
+ +The PEXPIREAT command supports a set of options: + +* `NX` -- Set expiry only when the key has no expiry +* `XX` -- Set expiry only when the key has an existing expiry +* `GT` -- Set expiry only when the new expiry is greater than current one +* `LT` -- Set expiry only when the new expiry is less than current one + +The GT, LT and NX options are mutually exclusive. + +#### Resp Reply + +One of the following: + +* Integer reply: 0 if the timeout was not set; for example, the key doesn't exist, or the operation was skipped because of the provided arguments. +* Integer reply: 1 if the timeout was set. --- + ### PTTL #### Syntax @@ -263,6 +376,26 @@ Renames key to newkey. It returns an error when key does not exist. If newkey al Simple string reply: OK. --- + +### RENAMENX + +#### Syntax + +```bash + RENAMENX key newkey +``` + +Renames key to newkey if newkey does not yet exist. It returns an error when key does not exist. + +#### Resp Reply + +One of the following: + +* Integer reply: 1 if key was renamed to newkey. +* Integer reply: 0 if newkey already exists. + +--- + ### SCAN #### Syntax diff --git a/website/docs/commands/raw-string.md b/website/docs/commands/raw-string.md index 15f152d616..54e88e1dcf 100644 --- a/website/docs/commands/raw-string.md +++ b/website/docs/commands/raw-string.md @@ -6,6 +6,22 @@ title: Raw String # Raw String Commands +### APPEND + +#### Syntax + +```bash + APPEND key value +``` + +If key already exists and is a string, this command appends the value at the end of the string. If key does not exist it is created and set as an empty string. + +#### RESP Reply + +Integer reply: the length of the string after the append operation. + +--- + ### DECR #### Syntax @@ -244,3 +260,21 @@ Returns the length of the string value stored at **key**. #### Resp Reply * Integer reply: the length of the string stored at key, or 0 when the key does not exist.
+ +--- + +### SETRANGE + +#### Syntax + +```bash + SETRANGE key offset value +``` + +Overwrites part of the string stored at key, starting at the specified offset, for the entire length of value. + +#### Resp Reply + +* Integer reply: the length of the string after it was modified by the command. + +--- diff --git a/website/docs/commands/scripting-and-functions.md b/website/docs/commands/scripting-and-functions.md new file mode 100644 index 0000000000..b4837bbe79 --- /dev/null +++ b/website/docs/commands/scripting-and-functions.md @@ -0,0 +1,86 @@ +--- +id: scripting-commands +sidebar_label: Scripting and functions +title: Scripting and functions +slug: scripting +--- + +### EVAL + +#### Syntax + +```bash + EVAL script numkeys [key [key ...]] [arg [arg ...]] +``` + +Invoke the execution of a server-side Lua script. + +#### Resp Reply + +The return value depends on the script that was executed. + +--- + +### EVALSHA + +#### Syntax + +```bash + EVALSHA sha1 numkeys [key [key ...]] [arg [arg ...]] +``` + +Evaluate a script from the server's cache by its SHA1 digest. + +#### Resp Reply + +The return value depends on the script that was executed. + +--- + +### SCRIPT EXISTS + +#### Syntax + +```bash + SCRIPT EXISTS sha1 [sha1 ...] +``` + +Returns information about the existence of the scripts in the script cache. + +#### Resp Reply + +Array reply: an array of integers that correspond to the specified SHA1 digest arguments. + +--- + +### SCRIPT FLUSH + +#### Syntax + +```bash + SCRIPT FLUSH [ASYNC | SYNC] +``` + +Flush the Lua scripts cache. + +#### Resp Reply + +Simple string reply: OK. + +--- + +### SCRIPT LOAD + +#### Syntax + +```bash + SCRIPT LOAD script +``` + +Load a script into the scripts cache, without executing it. + +#### Resp Reply + +Bulk string reply: the SHA1 digest of the script added into the script cache. 
+ +--- diff --git a/website/docs/commands/server.md b/website/docs/commands/server.md index 2d3a1cc867..8983a4a664 100644 --- a/website/docs/commands/server.md +++ b/website/docs/commands/server.md @@ -33,6 +33,22 @@ Returns Integer reply of number of total commands in this Garnet server. Integer reply: the number of commands returned by COMMAND. +--- +### COMMAND DOCS +#### Syntax + +```bash +COMMAND DOCS [command-name [command-name ...]] +``` + +Return documentary information about commands. + +By default, the reply includes all of the server's commands. You can use the optional command-name argument to specify the names of one or more commands. + +#### Resp Reply + +Array reply: a map, as a flattened array, where each key is a command name, and each value is the documentary information. + --- ### COMMAND INFO #### Syntax @@ -224,4 +240,20 @@ The TIME command returns the current server time as a two items lists: a Unix ti Array reply: specifically, a two-element array consisting of the Unix timestamp in seconds and the microseconds' count. ---- \ No newline at end of file +--- + +### MONITOR + +#### Syntax + +```bash +MONITOR +``` + +MONITOR is a debugging command that streams back every command processed by the Redis server. It can help in understanding what is happening to the database. + +#### Resp Reply + +Non-standard return value. Dumps the received commands in an infinite flow. + +--- diff --git a/website/docs/getting-started/configuration.md b/website/docs/getting-started/configuration.md index b1eae64365..04c7836491 100644 --- a/website/docs/getting-started/configuration.md +++ b/website/docs/getting-started/configuration.md @@ -74,14 +74,14 @@ For all available command line settings, run `GarnetServer.exe -h` or `GarnetSer | **MemorySize** | ```-m```
```--memory``` | ```string``` | Memory size | Total log memory used in bytes (rounds down to power of 2) | | **PageSize** | ```-p```
```--page``` | ```string``` | Memory size | Size of each page in bytes (rounds down to power of 2) | | **SegmentSize** | ```-s```
```--segment``` | ```string``` | Memory size | Size of each log segment in bytes on disk (rounds down to power of 2) | -| **IndexSize** | ```-i```
```--index``` | ```string``` | Memory size | Size of hash index in bytes (rounds down to power of 2) | +| **IndexSize** | ```-i```
```--index``` | ```string``` | Memory size | Start size of hash index in bytes (rounds down to power of 2) | | **IndexMaxSize** | ```--index-max-size``` | ```string``` | Memory size | Max size of hash index in bytes (rounds down to power of 2) | | **MutablePercent** | ```--mutable-percent``` | ```int``` | | Percentage of log memory that is kept mutable | -| **ObjectStoreTotalMemorySize** | ```--obj-total-memory``` | ```string``` | Memory size | Total object store log memory used including heap memory in bytes | -| **ObjectStoreLogMemorySize** | ```--obj-memory``` | ```string``` | Memory size | Object store log memory used in bytes excluding heap memory | +| **ObjectStoreHeapMemorySize** | ```--obj-heap-memory``` | ```string``` | Memory size | Object store heap memory size in bytes (Sum of size taken up by all object instances in the heap) | +| **ObjectStoreLogMemorySize** | ```--obj-log-memory``` | ```string``` | Memory size | Object store log memory used in bytes (Size of only the log with references to heap objects, excludes size of heap memory consumed by the objects themselves referred to from the log) | | **ObjectStorePageSize** | ```--obj-page``` | ```string``` | Memory size | Size of each object store page in bytes (rounds down to power of 2) | | **ObjectStoreSegmentSize** | ```--obj-segment``` | ```string``` | Memory size | Size of each object store log segment in bytes on disk (rounds down to power of 2) | -| **ObjectStoreIndexSize** | ```--obj-index``` | ```string``` | Memory size | Size of object store hash index in bytes (rounds down to power of 2) | +| **ObjectStoreIndexSize** | ```--obj-index``` | ```string``` | Memory size | Start size of object store hash index in bytes (rounds down to power of 2) | | **ObjectStoreIndexMaxSize** | ```--obj-index-max-size``` | ```string``` | Memory size | Max size of object store hash index in bytes (rounds down to power of 2) | | **ObjectStoreMutablePercent** | ```--obj-mutable-percent``` | ```int``` | | Percentage 
of object store log memory that is kept mutable | | **EnableStorageTier** | ```--storage-tier``` | ```bool``` | | Enable tiering of records (hybrid log) to storage, to support a larger-than-memory store. Use --logdir to specify storage directory. | diff --git a/website/docs/getting-started/memory.md b/website/docs/getting-started/memory.md index 38d2c98681..49e4fa7c42 100644 --- a/website/docs/getting-started/memory.md +++ b/website/docs/getting-started/memory.md @@ -78,7 +78,7 @@ However, the log memory is handled differently, as described below. In case of the object store, the hybrid log holds _references_ to keys and values (which are objects), rather than the actual keys and values themselves. The memory occupied by the object store log is configured using -`ObjectStoreLogMemorySize` (`--obj-memory`). However, this parameter only controls the number of records +`ObjectStoreLogMemorySize` (`--obj-log-memory`). However, this parameter only controls the number of records in the object store, where each record consists of: * An 8-byte header, called `RecordInfo`, which holds metadata and the logical address of the previous entry in a record chain. * An 8-byte reference to the key object, which is a byte array on heap (byte[]) @@ -90,13 +90,12 @@ setting `ObjectStoreLogMemorySize` to S merely implies that you can hold at most memory. This means, of course, that we need to track the total memory using a different mechanism. For this, Garnet -exposes a configuration called `ObjectStoreTotalMemorySize` (`--obj-total-memory`) which represents total object -store log memory used, including the hybrid log and the heap memory in bytes. You can use this parameter +exposes a configuration called `ObjectStoreHeapMemorySize` (`--obj-heap-memory`) which represents the heap memory +used by key byte arrays and the `IGarnetObject` instances in bytes. You can use this parameter in combination with the `--obj-log-memory` to control the total memory used by the object store. 
To summarize, the total space occupied by the object store is the sum of: * Object store index size (and overflow buckets), as before -* `ObjectStoreTotalMemorySize` - -with `ObjectStoreLogMemorySize` used to control the maximum _number_ of records in memory. +* `ObjectStoreLogMemorySize` (`--obj-log-memory`) which controls the maximum _number_ of records in memory. +* `ObjectStoreHeapMemorySize` (`--obj-heap-memory`) which controls the total heap size occupied by the objects. diff --git a/website/package.json b/website/package.json index e327baf958..263a89e60e 100644 --- a/website/package.json +++ b/website/package.json @@ -17,18 +17,18 @@ "@docusaurus/core": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0", "@docusaurus/theme-mermaid": "^3.4.0", - "@easyops-cn/docusaurus-search-local": "^0.44.2", + "@easyops-cn/docusaurus-search-local": "^0.44.5", "@mdx-js/react": "^3.0.0", "clsx": "^2.1.1", "docusaurus-plugin-clarity": "^2.1.0", "docusaurus-theme-github-codeblock": "^2.0.2", - "prism-react-renderer": "^2.1.0", + "prism-react-renderer": "^2.4.0", "react": "^18.3.1", "react-dom": "^18.3.1" }, "devDependencies": { - "@docusaurus/module-type-aliases": "3.4.0", - "@docusaurus/types": "3.4.0" + "@docusaurus/module-type-aliases": "3.5.2", + "@docusaurus/types": "3.5.2" }, "browserslist": { "production": [ diff --git a/website/sidebars.js b/website/sidebars.js index 697f6586a9..ccca441189 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -20,7 +20,7 @@ const sidebars = { {type: 'category', label: 'Welcome', collapsed: false, items: ["welcome/intro", "welcome/news", "welcome/features", "welcome/releases", "welcome/compatibility", "welcome/roadmap", "welcome/faq", "welcome/about-us"]}, {type: 'category', label: 'Getting Started', items: ["getting-started/build", "getting-started/configuration", "getting-started/memory", "getting-started/security", "getting-started/compaction"]}, {type: 'category', label: 'Benchmarking', items: ["benchmarking/overview", 
"benchmarking/results-resp-bench", "benchmarking/resp-bench"]}, - {type: 'category', label: 'Commands', items: ["commands/overview", "commands/api-compatibility", "commands/raw-string", "commands/generic-commands", "commands/analytics-commands", "commands/data-structures", "commands/server-commands", "commands/checkpoint-commands", "commands/transactions-commands", "commands/cluster", "commands/acl-commands"]}, + {type: 'category', label: 'Commands', items: ["commands/overview", "commands/api-compatibility", "commands/raw-string", "commands/generic-commands", "commands/analytics-commands", "commands/data-structures", "commands/server-commands", "commands/client-commands", "commands/checkpoint-commands", "commands/transactions-commands", "commands/cluster", "commands/acl-commands", "commands/scripting-commands"]}, {type: 'category', label: 'Server Extensions', items: ["extensions/overview", "extensions/raw-strings", "extensions/objects", "extensions/transactions", "extensions/procedure", "extensions/module"]}, {type: 'category', label: 'Cluster Mode', items: ["cluster/overview", "cluster/replication", "cluster/key-migration"]}, {type: 'category', label: 'Developer Guide', items: ["dev/onboarding", "dev/code-structure", "dev/configuration", "dev/network", "dev/processing", "dev/garnet-api", diff --git a/website/yarn.lock b/website/yarn.lock index 144b9aea6a..b9589fbad7 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -29,114 +29,167 @@ resolved "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.9.3.tgz" integrity sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ== -"@algolia/cache-browser-local-storage@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.22.1.tgz" - integrity sha512-Sw6IAmOCvvP6QNgY9j+Hv09mvkvEIDKjYW8ow0UDDAxSXy664RBNQk3i/0nt7gvceOJ6jGmOTimaZoY1THmU7g== - dependencies: - 
"@algolia/cache-common" "4.22.1" - -"@algolia/cache-common@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.22.1.tgz" - integrity sha512-TJMBKqZNKYB9TptRRjSUtevJeQVXRmg6rk9qgFKWvOy8jhCPdyNZV1nB3SKGufzvTVbomAukFR8guu/8NRKBTA== - -"@algolia/cache-in-memory@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.22.1.tgz" - integrity sha512-ve+6Ac2LhwpufuWavM/aHjLoNz/Z/sYSgNIXsinGofWOysPilQZPUetqLj8vbvi+DHZZaYSEP9H5SRVXnpsNNw== - dependencies: - "@algolia/cache-common" "4.22.1" - -"@algolia/client-account@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.22.1.tgz" - integrity sha512-k8m+oegM2zlns/TwZyi4YgCtyToackkOpE+xCaKCYfBfDtdGOaVZCM5YvGPtK+HGaJMIN/DoTL8asbM3NzHonw== - dependencies: - "@algolia/client-common" "4.22.1" - "@algolia/client-search" "4.22.1" - "@algolia/transporter" "4.22.1" - -"@algolia/client-analytics@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.22.1.tgz" - integrity sha512-1ssi9pyxyQNN4a7Ji9R50nSdISIumMFDwKNuwZipB6TkauJ8J7ha/uO60sPJFqQyqvvI+px7RSNRQT3Zrvzieg== - dependencies: - "@algolia/client-common" "4.22.1" - "@algolia/client-search" "4.22.1" - "@algolia/requester-common" "4.22.1" - "@algolia/transporter" "4.22.1" - -"@algolia/client-common@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.22.1.tgz" - integrity sha512-IvaL5v9mZtm4k4QHbBGDmU3wa/mKokmqNBqPj0K7lcR8ZDKzUorhcGp/u8PkPC/e0zoHSTvRh7TRkGX3Lm7iOQ== - dependencies: - "@algolia/requester-common" "4.22.1" - "@algolia/transporter" "4.22.1" - -"@algolia/client-personalization@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.22.1.tgz" - integrity 
sha512-sl+/klQJ93+4yaqZ7ezOttMQ/nczly/3GmgZXJ1xmoewP5jmdP/X/nV5U7EHHH3hCUEHeN7X1nsIhGPVt9E1cQ== - dependencies: - "@algolia/client-common" "4.22.1" - "@algolia/requester-common" "4.22.1" - "@algolia/transporter" "4.22.1" - -"@algolia/client-search@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.22.1.tgz" - integrity sha512-yb05NA4tNaOgx3+rOxAmFztgMTtGBi97X7PC3jyNeGiwkAjOZc2QrdZBYyIdcDLoI09N0gjtpClcackoTN0gPA== - dependencies: - "@algolia/client-common" "4.22.1" - "@algolia/requester-common" "4.22.1" - "@algolia/transporter" "4.22.1" +"@algolia/cache-browser-local-storage@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.24.0.tgz" + integrity sha512-t63W9BnoXVrGy9iYHBgObNXqYXM3tYXCjDSHeNwnsc324r4o5UiVKUiAB4THQ5z9U5hTj6qUvwg/Ez43ZD85ww== + dependencies: + "@algolia/cache-common" "4.24.0" + +"@algolia/cache-common@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.24.0.tgz" + integrity sha512-emi+v+DmVLpMGhp0V9q9h5CdkURsNmFC+cOS6uK9ndeJm9J4TiqSvPYVu+THUP8P/S08rxf5x2P+p3CfID0Y4g== + +"@algolia/cache-in-memory@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.24.0.tgz" + integrity sha512-gDrt2so19jW26jY3/MkFg5mEypFIPbPoXsQGQWAi6TrCPsNOSEYepBMPlucqWigsmEy/prp5ug2jy/N3PVG/8w== + dependencies: + "@algolia/cache-common" "4.24.0" + +"@algolia/client-account@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.24.0.tgz" + integrity sha512-adcvyJ3KjPZFDybxlqnf+5KgxJtBjwTPTeyG2aOyoJvx0Y8dUQAEOEVOJ/GBxX0WWNbmaSrhDURMhc+QeevDsA== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-analytics@4.24.0": + version "4.24.0" + resolved 
"https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.24.0.tgz" + integrity sha512-y8jOZt1OjwWU4N2qr8G4AxXAzaa8DBvyHTWlHzX/7Me1LX8OayfgHexqrsL4vSBcoMmVw2XnVW9MhL+Y2ZDJXg== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-common@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.24.0.tgz" + integrity sha512-bc2ROsNL6w6rqpl5jj/UywlIYC21TwSSoFHKl01lYirGMW+9Eek6r02Tocg4gZ8HAw3iBvu6XQiM3BEbmEMoiA== + dependencies: + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-common@5.4.1": + version "5.4.1" + resolved "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.4.1.tgz" + integrity sha512-IffPD+CETiR8YJMVC1lcjnhETLpJ2L0ORZCbbRvwo/S11D1j/keR7AqKVMn4TseRJCfjmBFOcFrC+m4sXjyQWA== + +"@algolia/client-personalization@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.24.0.tgz" + integrity sha512-l5FRFm/yngztweU0HdUzz1rC4yoWCFo3IF+dVIVTfEPg906eZg5BOd1k0K6rZx5JzyyoP4LdmOikfkfGsKVE9w== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-search@>= 4.9.1 < 6": + version "5.4.1" + resolved "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.4.1.tgz" + integrity sha512-nCgWY2p0tZgBqJKmA5E6B3VW+7uqxi1Orf88zNWOihJBRFeOV932pzG4vGrX9l0+p0o/vJabYxuomO35rEt5dw== + dependencies: + "@algolia/client-common" "5.4.1" + "@algolia/requester-browser-xhr" "5.4.1" + "@algolia/requester-fetch" "5.4.1" + "@algolia/requester-node-http" "5.4.1" + +"@algolia/client-search@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.24.0.tgz" + integrity 
sha512-uRW6EpNapmLAD0mW47OXqTP8eiIx5F6qN9/x/7HHO6owL3N1IXqydGwW5nhDFBrV+ldouro2W1VX3XlcUXEFCA== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" "@algolia/events@^4.0.1": version "4.0.1" resolved "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz" integrity sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ== -"@algolia/logger-common@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.22.1.tgz" - integrity sha512-OnTFymd2odHSO39r4DSWRFETkBufnY2iGUZNrMXpIhF5cmFE8pGoINNPzwg02QLBlGSaLqdKy0bM8S0GyqPLBg== +"@algolia/logger-common@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.24.0.tgz" + integrity sha512-LLUNjkahj9KtKYrQhFKCzMx0BY3RnNP4FEtO+sBybCjJ73E8jNdaKJ/Dd8A/VA4imVHP5tADZ8pn5B8Ga/wTMA== -"@algolia/logger-console@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.22.1.tgz" - integrity sha512-O99rcqpVPKN1RlpgD6H3khUWylU24OXlzkavUAMy6QZd1776QAcauE3oP8CmD43nbaTjBexZj2nGsBH9Tc0FVA== +"@algolia/logger-console@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.24.0.tgz" + integrity sha512-X4C8IoHgHfiUROfoRCV+lzSy+LHMgkoEEU1BbKcsfnV0i0S20zyy0NLww9dwVHUWNfPPxdMU+/wKmLGYf96yTg== dependencies: - "@algolia/logger-common" "4.22.1" + "@algolia/logger-common" "4.24.0" -"@algolia/requester-browser-xhr@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.22.1.tgz" - integrity sha512-dtQGYIg6MteqT1Uay3J/0NDqD+UciHy3QgRbk7bNddOJu+p3hzjTRYESqEnoX/DpEkaNYdRHUKNylsqMpgwaEw== +"@algolia/recommend@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/recommend/-/recommend-4.24.0.tgz" + integrity 
sha512-P9kcgerfVBpfYHDfVZDvvdJv0lEoCvzNlOy2nykyt5bK8TyieYyiD0lguIJdRZZYGre03WIAFf14pgE+V+IBlw== dependencies: - "@algolia/requester-common" "4.22.1" + "@algolia/cache-browser-local-storage" "4.24.0" + "@algolia/cache-common" "4.24.0" + "@algolia/cache-in-memory" "4.24.0" + "@algolia/client-common" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/logger-common" "4.24.0" + "@algolia/logger-console" "4.24.0" + "@algolia/requester-browser-xhr" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/requester-node-http" "4.24.0" + "@algolia/transporter" "4.24.0" -"@algolia/requester-common@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.22.1.tgz" - integrity sha512-dgvhSAtg2MJnR+BxrIFqlLtkLlVVhas9HgYKMk2Uxiy5m6/8HZBL40JVAMb2LovoPFs9I/EWIoFVjOrFwzn5Qg== +"@algolia/requester-browser-xhr@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.24.0.tgz" + integrity sha512-Z2NxZMb6+nVXSjF13YpjYTdvV3032YTBSGm2vnYvYPA6mMxzM3v5rsCiSspndn9rzIW4Qp1lPHBvuoKJV6jnAA== + dependencies: + "@algolia/requester-common" "4.24.0" + +"@algolia/requester-browser-xhr@5.4.1": + version "5.4.1" + resolved "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.4.1.tgz" + integrity sha512-J6+YfU+maR0nIbsYRHoq0UpneilX97hrZzPuuvSoBojQmPo8PeCXKGeT/F0D8uFI6G4CMTKEPGmQYrC9IpCbcQ== + dependencies: + "@algolia/client-common" "5.4.1" + +"@algolia/requester-common@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.24.0.tgz" + integrity sha512-k3CXJ2OVnvgE3HMwcojpvY6d9kgKMPRxs/kVohrwF5WMr2fnqojnycZkxPoEg+bXm8fi5BBfFmOqgYztRtHsQA== + +"@algolia/requester-fetch@5.4.1": + version "5.4.1" + resolved "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.4.1.tgz" + integrity 
sha512-AO/C1pqqpIS8p2IsfM5x92S+UBKkcIen5dHfMEh1rnV0ArWDreeqrtxMD2A+6AjQVwYeZNy56w7o7PVIm6mc8g== + dependencies: + "@algolia/client-common" "5.4.1" -"@algolia/requester-node-http@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.22.1.tgz" - integrity sha512-JfmZ3MVFQkAU+zug8H3s8rZ6h0ahHZL/SpMaSasTCGYR5EEJsCc8SI5UZ6raPN2tjxa5bxS13BRpGSBUens7EA== +"@algolia/requester-node-http@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.24.0.tgz" + integrity sha512-JF18yTjNOVYvU/L3UosRcvbPMGT9B+/GQWNWnenIImglzNVGpyzChkXLnrSf6uxwVNO6ESGu6oN8MqcGQcjQJw== dependencies: - "@algolia/requester-common" "4.22.1" + "@algolia/requester-common" "4.24.0" -"@algolia/transporter@4.22.1": - version "4.22.1" - resolved "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.22.1.tgz" - integrity sha512-kzWgc2c9IdxMa3YqA6TN0NW5VrKYYW/BELIn7vnLyn+U/RFdZ4lxxt9/8yq3DKV5snvoDzzO4ClyejZRdV3lMQ== +"@algolia/requester-node-http@5.4.1": + version "5.4.1" + resolved "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.4.1.tgz" + integrity sha512-2Y3vffc91egwFxz0SjXFEH4q8nvlNJHcz+0//NaWItRU68AvD+3aI/j66STPjkLQOC0Ku6ckA9ChhbOVfrv+Uw== dependencies: - "@algolia/cache-common" "4.22.1" - "@algolia/logger-common" "4.22.1" - "@algolia/requester-common" "4.22.1" + "@algolia/client-common" "5.4.1" + +"@algolia/transporter@4.24.0": + version "4.24.0" + resolved "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.24.0.tgz" + integrity sha512-86nI7w6NzWxd1Zp9q3413dRshDqAzSbsQjhcDhPIatEFiZrL1/TjnHL8S7jVKFePlIMzDsZWXAXwXzcok9c5oA== + dependencies: + "@algolia/cache-common" "4.24.0" + "@algolia/logger-common" "4.24.0" + "@algolia/requester-common" "4.24.0" "@ampproject/remapping@^2.2.0": version "2.3.0" @@ -154,46 +207,12 @@ "@babel/highlight" "^7.23.4" chalk "^2.4.2" -"@babel/code-frame@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.7.tgz#882fd9e09e8ee324e496bd040401c6f046ef4465" - integrity sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA== - dependencies: - "@babel/highlight" "^7.24.7" - picocolors "^1.0.0" - "@babel/compat-data@^7.22.6", "@babel/compat-data@^7.23.5": version "7.23.5" resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.5.tgz" integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== -"@babel/compat-data@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.24.7.tgz#d23bbea508c3883ba8251fb4164982c36ea577ed" - integrity sha512-qJzAIcv03PyaWqxRgO4mSU3lihncDT296vnyuE2O8uA4w3UHWI4S3hgeZd1L8W1Bft40w9JxJ2b412iDUFFRhw== - -"@babel/core@^7.21.3": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.24.7.tgz#b676450141e0b52a3d43bc91da86aa608f950ac4" - integrity sha512-nykK+LEK86ahTkX/3TgauT0ikKoNCfKHEaZYTUVupJdTLzGNvrblu4u6fa7DhZONAltdf8e662t/abY8idrd/g== - dependencies: - "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.24.7" - "@babel/generator" "^7.24.7" - "@babel/helper-compilation-targets" "^7.24.7" - "@babel/helper-module-transforms" "^7.24.7" - "@babel/helpers" "^7.24.7" - "@babel/parser" "^7.24.7" - "@babel/template" "^7.24.7" - "@babel/traverse" "^7.24.7" - "@babel/types" "^7.24.7" - convert-source-map "^2.0.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.3" - semver "^6.3.1" - -"@babel/core@^7.23.3": +"@babel/core@^7.0.0", "@babel/core@^7.0.0-0", "@babel/core@^7.0.0-0 || ^8.0.0-0 <8.0.0", "@babel/core@^7.12.0", "@babel/core@^7.13.0", "@babel/core@^7.21.3", "@babel/core@^7.23.3", "@babel/core@^7.4.0 || ^8.0.0-0 <8.0.0": version "7.24.0" resolved "https://registry.npmjs.org/@babel/core/-/core-7.24.0.tgz" integrity 
sha512-fQfkg0Gjkza3nf0c7/w6Xf34BW4YvzNfACRLmmb7XRLa6XHdR+K9AlJlxneFfWYf6uhOzuzZVTjF/8KfndZANw== @@ -224,16 +243,6 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/generator@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.24.7.tgz#1654d01de20ad66b4b4d99c135471bc654c55e6d" - integrity sha512-oipXieGC3i45Y1A41t4tAqpnEZWgB/lC6Ehh6+rOviR5XWpTtMmLN+fGjz9vOiNRt0p6RtO6DtD0pdU3vpqdSA== - dependencies: - "@babel/types" "^7.24.7" - "@jridgewell/gen-mapping" "^0.3.5" - "@jridgewell/trace-mapping" "^0.3.25" - jsesc "^2.5.1" - "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" resolved "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz" @@ -241,13 +250,6 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-annotate-as-pure@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.7.tgz#5373c7bc8366b12a033b4be1ac13a206c6656aab" - integrity sha512-BaDeOonYvhdKw+JoMVkAixAAJzG2jVPIwWoKBPdYuY9b452e2rPuI9QPYh3KpofZ3pW2akOmwZLOiOsHMiqRAg== - dependencies: - "@babel/types" "^7.24.7" - "@babel/helper-builder-binary-assignment-operator-visitor@^7.22.15": version "7.22.15" resolved "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz" @@ -255,14 +257,6 @@ dependencies: "@babel/types" "^7.22.15" -"@babel/helper-builder-binary-assignment-operator-visitor@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.7.tgz#37d66feb012024f2422b762b9b2a7cfe27c7fba3" - integrity sha512-xZeCVVdwb4MsDBkkyZ64tReWYrLRHlMN72vP7Bdm3OUOuyFZExhsHUUnuWnm2/XOlAJzR0LfPpB56WXZn0X/lA== - dependencies: - "@babel/traverse" "^7.24.7" - "@babel/types" "^7.24.7" - 
"@babel/helper-compilation-targets@^7.22.15", "@babel/helper-compilation-targets@^7.22.6", "@babel/helper-compilation-targets@^7.23.6": version "7.23.6" resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz" @@ -274,17 +268,6 @@ lru-cache "^5.1.1" semver "^6.3.1" -"@babel/helper-compilation-targets@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.7.tgz#4eb6c4a80d6ffeac25ab8cd9a21b5dfa48d503a9" - integrity sha512-ctSdRHBi20qWOfy27RUb4Fhp07KSJ3sXcuSvTrXrc4aG8NSYDo1ici3Vhg9bg69y5bj0Mr1lh0aeEgTvc12rMg== - dependencies: - "@babel/compat-data" "^7.24.7" - "@babel/helper-validator-option" "^7.24.7" - browserslist "^4.22.2" - lru-cache "^5.1.1" - semver "^6.3.1" - "@babel/helper-create-class-features-plugin@^7.22.15", "@babel/helper-create-class-features-plugin@^7.23.6": version "7.24.0" resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.0.tgz" @@ -300,21 +283,6 @@ "@babel/helper-split-export-declaration" "^7.22.6" semver "^6.3.1" -"@babel/helper-create-class-features-plugin@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.7.tgz#2eaed36b3a1c11c53bdf80d53838b293c52f5b3b" - integrity sha512-kTkaDl7c9vO80zeX1rJxnuRpEsD5tA81yh11X1gQo+PhSti3JS+7qeZo9U4RHobKRiFPKaGK3svUAeb8D0Q7eg== - dependencies: - "@babel/helper-annotate-as-pure" "^7.24.7" - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-function-name" "^7.24.7" - "@babel/helper-member-expression-to-functions" "^7.24.7" - "@babel/helper-optimise-call-expression" "^7.24.7" - "@babel/helper-replace-supers" "^7.24.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" - "@babel/helper-split-export-declaration" "^7.24.7" - semver "^6.3.1" - 
"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.15", "@babel/helper-create-regexp-features-plugin@^7.22.5": version "7.22.15" resolved "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz" @@ -324,15 +292,6 @@ regexpu-core "^5.3.1" semver "^6.3.1" -"@babel/helper-create-regexp-features-plugin@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.24.7.tgz#be4f435a80dc2b053c76eeb4b7d16dd22cfc89da" - integrity sha512-03TCmXy2FtXJEZfbXDTSqq1fRJArk7lX9DOFC/47VthYcxyIOx+eXQmdo6DOQvrbpIix+KfXwvuXdFDZHxt+rA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.24.7" - regexpu-core "^5.3.1" - semver "^6.3.1" - "@babel/helper-define-polyfill-provider@^0.5.0": version "0.5.0" resolved "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.5.0.tgz" @@ -355,29 +314,11 @@ lodash.debounce "^4.0.8" resolve "^1.14.2" -"@babel/helper-define-polyfill-provider@^0.6.2": - version "0.6.2" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.2.tgz#18594f789c3594acb24cfdb4a7f7b7d2e8bd912d" - integrity sha512-LV76g+C502biUK6AyZ3LK10vDpDyCzZnhZFXkH1L75zHPj68+qc8Zfpx2th+gzwA2MzyK+1g/3EPl62yFnVttQ== - dependencies: - "@babel/helper-compilation-targets" "^7.22.6" - "@babel/helper-plugin-utils" "^7.22.5" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - "@babel/helper-environment-visitor@^7.22.20": version "7.22.20" resolved "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz" integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== -"@babel/helper-environment-visitor@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz#4b31ba9551d1f90781ba83491dd59cf9b269f7d9" - integrity sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ== - dependencies: - "@babel/types" "^7.24.7" - "@babel/helper-function-name@^7.22.5", "@babel/helper-function-name@^7.23.0": version "7.23.0" resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz" @@ -386,14 +327,6 @@ "@babel/template" "^7.22.15" "@babel/types" "^7.23.0" -"@babel/helper-function-name@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz#75f1e1725742f39ac6584ee0b16d94513da38dd2" - integrity sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA== - dependencies: - "@babel/template" "^7.24.7" - "@babel/types" "^7.24.7" - "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz" @@ -401,13 +334,6 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-hoist-variables@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz#b4ede1cde2fd89436397f30dc9376ee06b0f25ee" - integrity sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ== - dependencies: - "@babel/types" "^7.24.7" - "@babel/helper-member-expression-to-functions@^7.22.15", "@babel/helper-member-expression-to-functions@^7.23.0": version "7.23.0" resolved "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.23.0.tgz" @@ -415,14 +341,6 @@ dependencies: "@babel/types" "^7.23.0" -"@babel/helper-member-expression-to-functions@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.7.tgz#67613d068615a70e4ed5101099affc7a41c5225f" - integrity sha512-LGeMaf5JN4hAT471eJdBs/GK1DoYIJ5GCtZN/EsL6KUiiDZOvO/eKE11AMZJa2zP4zk4qe9V2O/hxAmkRc8p6w== - dependencies: - "@babel/traverse" "^7.24.7" - "@babel/types" "^7.24.7" - "@babel/helper-module-imports@^7.22.15": version "7.22.15" resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz" @@ -430,14 +348,6 @@ dependencies: "@babel/types" "^7.22.15" -"@babel/helper-module-imports@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz#f2f980392de5b84c3328fc71d38bd81bbb83042b" - integrity sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA== - dependencies: - "@babel/traverse" "^7.24.7" - "@babel/types" "^7.24.7" - "@babel/helper-module-transforms@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz" @@ -449,17 +359,6 @@ "@babel/helper-split-export-declaration" "^7.22.6" "@babel/helper-validator-identifier" "^7.22.20" -"@babel/helper-module-transforms@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.24.7.tgz#31b6c9a2930679498db65b685b1698bfd6c7daf8" - integrity sha512-1fuJEwIrp+97rM4RWdO+qrRsZlAeL1lQJoPqtCYWv0NL115XM93hIH4CSRln2w52SqvmY5hqdtauB6QFCDiZNQ== - dependencies: - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-module-imports" "^7.24.7" - "@babel/helper-simple-access" "^7.24.7" - "@babel/helper-split-export-declaration" "^7.24.7" - "@babel/helper-validator-identifier" "^7.24.7" - "@babel/helper-optimise-call-expression@^7.22.5": version "7.22.5" resolved 
"https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz" @@ -467,21 +366,9 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-optimise-call-expression@^7.24.7": +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.24.0", "@babel/helper-plugin-utils@^7.24.7", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.7.tgz#8b0a0456c92f6b323d27cfd00d1d664e76692a0f" - integrity sha512-jKiTsW2xmWwxT1ixIdfXUZp+P5yURx2suzLZr5Hi64rURpDYdMW0pv+Uf17EYk2Rd428Lx4tLsnjGJzYKDM/6A== - dependencies: - "@babel/types" "^7.24.7" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.24.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.24.0" - resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.0.tgz" - integrity sha512-9cUznXMG0+FxRuJfvL82QlTqIzhVW9sL0KjMPHhAOOvpQGL8QtdxnBKILjBqxlHyliz0yCa1G903ZXI/FuHy2w== - -"@babel/helper-plugin-utils@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.7.tgz#98c84fe6fe3d0d3ae7bfc3a5e166a46844feb2a0" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.7.tgz" integrity sha512-Rq76wjt7yz9AAc1KnlRKNAi/dMSVWgDRx43FHoJEbcYU6xOWaE2dVPwcdTukJrjxS65GITyfbvEYHvkirZ6uEg== "@babel/helper-remap-async-to-generator@^7.22.20": @@ -493,15 +380,6 @@ 
"@babel/helper-environment-visitor" "^7.22.20" "@babel/helper-wrap-function" "^7.22.20" -"@babel/helper-remap-async-to-generator@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.24.7.tgz#b3f0f203628522713849d49403f1a414468be4c7" - integrity sha512-9pKLcTlZ92hNZMQfGCHImUpDOlAgkkpqalWEeftW5FBya75k8Li2ilerxkM/uBEj01iBZXcCIB/bwvDYgWyibA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.24.7" - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-wrap-function" "^7.24.7" - "@babel/helper-replace-supers@^7.22.20": version "7.22.20" resolved "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.22.20.tgz" @@ -511,15 +389,6 @@ "@babel/helper-member-expression-to-functions" "^7.22.15" "@babel/helper-optimise-call-expression" "^7.22.5" -"@babel/helper-replace-supers@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.24.7.tgz#f933b7eed81a1c0265740edc91491ce51250f765" - integrity sha512-qTAxxBM81VEyoAY0TtLrx1oAEJc09ZK67Q9ljQToqCnA+55eNwCORaxlKyu+rNfX86o8OXRUSNUnrtsAZXM9sg== - dependencies: - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-member-expression-to-functions" "^7.24.7" - "@babel/helper-optimise-call-expression" "^7.24.7" - "@babel/helper-simple-access@^7.22.5": version "7.22.5" resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz" @@ -527,14 +396,6 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-simple-access@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz#bcade8da3aec8ed16b9c4953b74e506b51b5edb3" - integrity sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg== - dependencies: - "@babel/traverse" "^7.24.7" - "@babel/types" "^7.24.7" - 
"@babel/helper-skip-transparent-expression-wrappers@^7.22.5": version "7.22.5" resolved "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz" @@ -542,14 +403,6 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-skip-transparent-expression-wrappers@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.7.tgz#5f8fa83b69ed5c27adc56044f8be2b3ea96669d9" - integrity sha512-IO+DLT3LQUElMbpzlatRASEyQtfhSE0+m465v++3jyyXeBTBUjtVZg28/gHeV5mrTJqvEKhKroBGAvhW+qPHiQ== - dependencies: - "@babel/traverse" "^7.24.7" - "@babel/types" "^7.24.7" - "@babel/helper-split-export-declaration@^7.22.6": version "7.22.6" resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz" @@ -557,43 +410,21 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-split-export-declaration@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz#83949436890e07fa3d6873c61a96e3bbf692d856" - integrity sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA== - dependencies: - "@babel/types" "^7.24.7" - "@babel/helper-string-parser@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz" integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== -"@babel/helper-string-parser@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.24.7.tgz#4d2d0f14820ede3b9807ea5fc36dfc8cd7da07f2" - integrity sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg== - 
"@babel/helper-validator-identifier@^7.22.20": version "7.22.20" resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz" integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== -"@babel/helper-validator-identifier@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz#75b889cfaf9e35c2aaf42cf0d72c8e91719251db" - integrity sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w== - "@babel/helper-validator-option@^7.22.15", "@babel/helper-validator-option@^7.23.5": version "7.23.5" resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz" integrity sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw== -"@babel/helper-validator-option@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.24.7.tgz#24c3bb77c7a425d1742eec8fb433b5a1b38e62f6" - integrity sha512-yy1/KvjhV/ZCL+SM7hBrvnZJ3ZuT9OuZgIJAGpPEToANvc3iM6iDvBnRjtElWibHU6n8/LPR/EjX9EtIEYO3pw== - "@babel/helper-wrap-function@^7.22.20": version "7.22.20" resolved "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz" @@ -603,16 +434,6 @@ "@babel/template" "^7.22.15" "@babel/types" "^7.22.19" -"@babel/helper-wrap-function@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.24.7.tgz#52d893af7e42edca7c6d2c6764549826336aae1f" - integrity sha512-N9JIYk3TD+1vq/wn77YnJOqMtfWhNewNE+DJV4puD2X7Ew9J4JvrzrFDfTfyv5EgEXVy9/Wt8QiOErzEmv5Ifw== - dependencies: - "@babel/helper-function-name" "^7.24.7" - "@babel/template" "^7.24.7" - "@babel/traverse" "^7.24.7" - "@babel/types" "^7.24.7" - "@babel/helpers@^7.24.0": version "7.24.0" 
resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.0.tgz" @@ -622,14 +443,6 @@ "@babel/traverse" "^7.24.0" "@babel/types" "^7.24.0" -"@babel/helpers@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.24.7.tgz#aa2ccda29f62185acb5d42fb4a3a1b1082107416" - integrity sha512-NlmJJtvcw72yRJRcnCmGvSi+3jDEg8qFu3z0AFoymmzLx5ERVWyzd9kVXr7Th9/8yIJi2Zc6av4Tqz3wFs8QWg== - dependencies: - "@babel/template" "^7.24.7" - "@babel/types" "^7.24.7" - "@babel/highlight@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz" @@ -639,34 +452,11 @@ chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/highlight@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.7.tgz#a05ab1df134b286558aae0ed41e6c5f731bf409d" - integrity sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw== - dependencies: - "@babel/helper-validator-identifier" "^7.24.7" - chalk "^2.4.2" - js-tokens "^4.0.0" - picocolors "^1.0.0" - "@babel/parser@^7.24.0": version "7.24.0" resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.24.0.tgz" integrity sha512-QuP/FxEAzMSjXygs8v4N9dvdXzEHN4W1oF3PxuWAtPo08UdM17u89RDMgjLn/mlc56iM0HlLmVkO/wgR+rDgHg== -"@babel/parser@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.24.7.tgz#9a5226f92f0c5c8ead550b750f5608e766c8ce85" - integrity sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw== - -"@babel/plugin-bugfix-firefox-class-in-computed-class-key@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.24.7.tgz#fd059fd27b184ea2b4c7e646868a9a381bbc3055" - integrity sha512-TiT1ss81W80eQsN+722OaeQMY/G4yTb4G9JrqeiDADs3N8lbPMGldWi9x8tyqCW5NLx1Jh2AvkE6r6QvEltMMQ== - dependencies: - 
"@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.23.3.tgz" @@ -674,13 +464,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.7.tgz#468096ca44bbcbe8fcc570574e12eb1950e18107" - integrity sha512-unaQgZ/iRu/By6tsjMZzpeBZjChYfLYry6HrEXPoz3KmfF0sVBQ1l8zKMQ4xRGLWVsjuvB8nQfjNP/DcfEOCsg== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.23.3.tgz" @@ -690,15 +473,6 @@ "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" "@babel/plugin-transform-optional-chaining" "^7.23.3" -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.7.tgz#e4eabdd5109acc399b38d7999b2ef66fc2022f89" - integrity sha512-+izXIbke1T33mY4MSNnrqhPXDz01WYhEf3yF5NbnUtkiNnm+XBZJl3kNfoK6NKmYlz/D07+l2GWVK/QfDkNCuQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" - "@babel/plugin-transform-optional-chaining" "^7.24.7" - 
"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.23.7": version "7.23.7" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.23.7.tgz" @@ -707,14 +481,6 @@ "@babel/helper-environment-visitor" "^7.22.20" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.7.tgz#71b21bb0286d5810e63a1538aa901c58e87375ec" - integrity sha512-utA4HuR6F4Vvcr+o4DnjL8fCOlgRFGbeeBEGNg3ZTrLFw6VWG5XmUrvcQ0FjIYMU2ST4XcR2Wsp7t9qOAPnxMg== - dependencies: - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": version "7.21.0-placeholder-for-preset-env.2" resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz" @@ -762,13 +528,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-import-assertions@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.7.tgz#2a0b406b5871a20a841240586b1300ce2088a778" - integrity sha512-Ec3NRUMoi8gskrkBe3fNmEQfxDvY8bgfQpz6jlk/41kX9eUjvpyqWU7PBP/pLAvMaSQjbMNKJmvX57jP+M6bPg== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-import-attributes@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.23.3.tgz" @@ -776,13 +535,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-import-attributes@^7.24.7": - version 
"7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.7.tgz#b4f9ea95a79e6912480c4b626739f86a076624ca" - integrity sha512-hbX+lKKeUMGihnK8nvKqmXBInriT3GVjzXKFriV3YC6APGxMbP8RZNFwy91+hocLXq90Mta+HshoB31802bb8A== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-import-meta@^7.10.4": version "7.10.4" resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz" @@ -804,13 +556,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-jsx@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.7.tgz#39a1fa4a7e3d3d7f34e2acc6be585b718d30e02d" - integrity sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-logical-assignment-operators@^7.10.4": version "7.10.4" resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" @@ -874,13 +619,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-typescript@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz#58d458271b4d3b6bb27ee6ac9525acbb259bad1c" - integrity sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-unicode-sets-regex@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz" @@ -896,13 +634,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-arrow-functions@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.7.tgz#4f6886c11e423bd69f3ce51dbf42424a5f275514" - integrity sha512-Dt9LQs6iEY++gXUwY03DNFat5C2NbO48jj+j/bSAz6b3HgPs39qcPiYt77fDObIcFwj3/C2ICX9YMwGflUoSHQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-async-generator-functions@^7.23.9": version "7.23.9" resolved "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.23.9.tgz" @@ -913,16 +644,6 @@ "@babel/helper-remap-async-to-generator" "^7.22.20" "@babel/plugin-syntax-async-generators" "^7.8.4" -"@babel/plugin-transform-async-generator-functions@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.7.tgz#7330a5c50e05181ca52351b8fd01642000c96cfd" - integrity sha512-o+iF77e3u7ZS4AoAuJvapz9Fm001PuD2V3Lp6OSE4FYQke+cSewYtnek+THqGRWyQloRCyvWL1OkyfNEl9vr/g== - dependencies: - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-remap-async-to-generator" "^7.24.7" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-transform-async-to-generator@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.23.3.tgz" @@ -932,15 +653,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-remap-async-to-generator" "^7.22.20" -"@babel/plugin-transform-async-to-generator@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.7.tgz#72a3af6c451d575842a7e9b5a02863414355bdcc" - integrity sha512-SQY01PcJfmQ+4Ash7NE+rpbLFbmqA2GPIgqzxfFTL4t1FKRq4zTms/7htKpoCUI9OcFYgzqfmCdH53s6/jn5fA== - dependencies: - "@babel/helper-module-imports" "^7.24.7" - 
"@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-remap-async-to-generator" "^7.24.7" - "@babel/plugin-transform-block-scoped-functions@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.23.3.tgz" @@ -948,13 +660,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoped-functions@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.7.tgz#a4251d98ea0c0f399dafe1a35801eaba455bbf1f" - integrity sha512-yO7RAz6EsVQDaBH18IDJcMB1HnrUn2FJ/Jslc/WtPPWcjhpUJXU/rjbwmluzp7v/ZzWcEhTMXELnnsz8djWDwQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-block-scoping@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.23.4.tgz" @@ -962,13 +667,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoping@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.7.tgz#42063e4deb850c7bd7c55e626bf4e7ab48e6ce02" - integrity sha512-Nd5CvgMbWc+oWzBsuaMcbwjJWAcp5qzrbg69SZdHSP7AMY0AbWFqFO0WTFCA1jxhMCwodRwvRec8k0QUbZk7RQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-class-properties@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.23.3.tgz" @@ -977,14 +675,6 @@ "@babel/helper-create-class-features-plugin" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-class-properties@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.7.tgz#256879467b57b0b68c7ddfc5b76584f398cd6834" - integrity sha512-vKbfawVYayKcSeSR5YYzzyXvsDFWU2mD8U5TFeXtbCPLFUqe7GyCgvO6XDHzje862ODrOwy6WCPmKeWHbCFJ4w== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-class-static-block@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.23.4.tgz" @@ -994,15 +684,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-class-static-block" "^7.14.5" -"@babel/plugin-transform-class-static-block@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.7.tgz#c82027ebb7010bc33c116d4b5044fbbf8c05484d" - integrity sha512-HMXK3WbBPpZQufbMG4B46A90PkuuhN9vBCb5T8+VAHqvAqvcLi+2cKoukcpmUYkszLhScU3l1iudhrks3DggRQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-transform-classes@^7.23.8": version "7.23.8" resolved "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.23.8.tgz" @@ -1017,20 +698,6 @@ "@babel/helper-split-export-declaration" "^7.22.6" globals "^11.1.0" -"@babel/plugin-transform-classes@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.7.tgz#4ae6ef43a12492134138c1e45913f7c46c41b4bf" - integrity sha512-CFbbBigp8ln4FU6Bpy6g7sE8B/WmCmzvivzUC6xDAdWVsjYTXijpuuGJmYkAaoWAzcItGKT3IOAbxRItZ5HTjw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.24.7" - "@babel/helper-compilation-targets" "^7.24.7" - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-function-name" 
"^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-replace-supers" "^7.24.7" - "@babel/helper-split-export-declaration" "^7.24.7" - globals "^11.1.0" - "@babel/plugin-transform-computed-properties@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.23.3.tgz" @@ -1039,14 +706,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/template" "^7.22.15" -"@babel/plugin-transform-computed-properties@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.7.tgz#4cab3214e80bc71fae3853238d13d097b004c707" - integrity sha512-25cS7v+707Gu6Ds2oY6tCkUwsJ9YIDbggd9+cu9jzzDgiNq7hR/8dkzxWfKWnTic26vsI3EsCXNd4iEB6e8esQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/template" "^7.24.7" - "@babel/plugin-transform-destructuring@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.23.3.tgz" @@ -1054,13 +713,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-destructuring@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.7.tgz#a097f25292defb6e6cc16d6333a4cfc1e3c72d9e" - integrity sha512-19eJO/8kdCQ9zISOf+SEUJM/bAUIsvY3YDnXZTupUCQ8LgrWnsG/gFB9dvXqdXnRXMAM8fvt7b0CBKQHNGy1mw== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-dotall-regex@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.23.3.tgz" @@ -1069,14 +721,6 @@ "@babel/helper-create-regexp-features-plugin" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-dotall-regex@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.7.tgz#5f8bf8a680f2116a7207e16288a5f974ad47a7a0" - integrity sha512-ZOA3W+1RRTSWvyqcMJDLqbchh7U4NRGqwRfFSVbOLS/ePIP4vHB5e8T8eXcuqyN1QkgKyj5wuW0lcS85v4CrSw== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-duplicate-keys@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.23.3.tgz" @@ -1084,13 +728,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-duplicate-keys@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.7.tgz#dd20102897c9a2324e5adfffb67ff3610359a8ee" - integrity sha512-JdYfXyCRihAe46jUIliuL2/s0x0wObgwwiGxw/UbgJBr20gQBThrokO4nYKgWkD7uBaqM7+9x5TU7NkExZJyzw== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-dynamic-import@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.23.4.tgz" @@ -1099,14 +736,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-dynamic-import" "^7.8.3" -"@babel/plugin-transform-dynamic-import@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.7.tgz#4d8b95e3bae2b037673091aa09cd33fecd6419f4" - integrity sha512-sc3X26PhZQDb3JhORmakcbvkeInvxz+A8oda99lj7J60QRuPZvNAk9wQlTBS1ZynelDrDmTU4pw1tyc5d5ZMUg== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-transform-exponentiation-operator@^7.23.3": version "7.23.3" resolved 
"https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.23.3.tgz" @@ -1115,14 +744,6 @@ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-exponentiation-operator@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.7.tgz#b629ee22645f412024297d5245bce425c31f9b0d" - integrity sha512-Rqe/vSc9OYgDajNIK35u7ot+KeCoetqQYFXM4Epf7M7ez3lWlOjrDjrwMei6caCVhfdw+mIKD4cgdGNy5JQotQ== - dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-export-namespace-from@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.23.4.tgz" @@ -1131,14 +752,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-export-namespace-from" "^7.8.3" -"@babel/plugin-transform-export-namespace-from@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.7.tgz#176d52d8d8ed516aeae7013ee9556d540c53f197" - integrity sha512-v0K9uNYsPL3oXZ/7F9NNIbAj2jv1whUEtyA6aujhekLs56R++JDQuzRcP2/z4WX5Vg/c5lE9uWZA0/iUoFhLTA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-transform-for-of@^7.23.6": version "7.23.6" resolved "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.23.6.tgz" @@ -1147,14 +760,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" -"@babel/plugin-transform-for-of@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.7.tgz#f25b33f72df1d8be76399e1b8f3f9d366eb5bc70" - integrity sha512-wo9ogrDG1ITTTBsy46oGiN1dS9A7MROBTcYsfS8DtsImMkHk9JXJ3EWQM6X2SUw4x80uGPlwj0o00Uoc6nEE3g== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" - "@babel/plugin-transform-function-name@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.23.3.tgz" @@ -1164,15 +769,6 @@ "@babel/helper-function-name" "^7.23.0" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-function-name@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.7.tgz#6d8601fbffe665c894440ab4470bc721dd9131d6" - integrity sha512-U9FcnA821YoILngSmYkW6FjyQe2TyZD5pHt4EVIhmcTkrJw/3KqcrRSxuOo5tFZJi7TE19iDyI1u+weTI7bn2w== - dependencies: - "@babel/helper-compilation-targets" "^7.24.7" - "@babel/helper-function-name" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-json-strings@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.23.4.tgz" @@ -1181,14 +777,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-json-strings" "^7.8.3" -"@babel/plugin-transform-json-strings@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.7.tgz#f3e9c37c0a373fee86e36880d45b3664cedaf73a" - integrity sha512-2yFnBGDvRuxAaE/f0vfBKvtnvvqU8tGpMHqMNpTN2oWMKIR3NqFkjaAgGwawhqK/pIN2T3XdjGPdaG0vDhOBGw== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-transform-literals@^7.23.3": version "7.23.3" resolved 
"https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.23.3.tgz" @@ -1196,13 +784,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-literals@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.7.tgz#36b505c1e655151a9d7607799a9988fc5467d06c" - integrity sha512-vcwCbb4HDH+hWi8Pqenwnjy+UiklO4Kt1vfspcQYFhJdpthSnW8XvWGyDZWKNVrVbVViI/S7K9PDJZiUmP2fYQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-logical-assignment-operators@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.23.4.tgz" @@ -1211,14 +792,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" -"@babel/plugin-transform-logical-assignment-operators@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.7.tgz#a58fb6eda16c9dc8f9ff1c7b1ba6deb7f4694cb0" - integrity sha512-4D2tpwlQ1odXmTEIFWy9ELJcZHqrStlzK/dAOWYyxX3zT0iXQB6banjgeOJQXzEc4S0E0a5A+hahxPaEFYftsw== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-transform-member-expression-literals@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.23.3.tgz" @@ -1226,13 +799,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-member-expression-literals@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.7.tgz#3b4454fb0e302e18ba4945ba3246acb1248315df" 
- integrity sha512-T/hRC1uqrzXMKLQ6UCwMT85S3EvqaBXDGf0FaMf4446Qx9vKwlghvee0+uuZcDUCZU5RuNi4781UQ7R308zzBw== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-modules-amd@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.23.3.tgz" @@ -1241,14 +807,6 @@ "@babel/helper-module-transforms" "^7.23.3" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-amd@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.7.tgz#65090ed493c4a834976a3ca1cde776e6ccff32d7" - integrity sha512-9+pB1qxV3vs/8Hdmz/CulFB8w2tuu6EB94JZFsjdqxQokwGa9Unap7Bo2gGBGIvPmDIVvQrom7r5m/TCDMURhg== - dependencies: - "@babel/helper-module-transforms" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-modules-commonjs@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.23.3.tgz" @@ -1258,15 +816,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-simple-access" "^7.22.5" -"@babel/plugin-transform-modules-commonjs@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.7.tgz#9fd5f7fdadee9085886b183f1ad13d1ab260f4ab" - integrity sha512-iFI8GDxtevHJ/Z22J5xQpVqFLlMNstcLXh994xifFwxxGslr2ZXXLWgtBeLctOD63UFDArdvN6Tg8RFw+aEmjQ== - dependencies: - "@babel/helper-module-transforms" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-simple-access" "^7.24.7" - "@babel/plugin-transform-modules-systemjs@^7.23.9": version "7.23.9" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.23.9.tgz" @@ -1277,16 +826,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-validator-identifier" "^7.22.20" 
-"@babel/plugin-transform-modules-systemjs@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.7.tgz#f8012316c5098f6e8dee6ecd58e2bc6f003d0ce7" - integrity sha512-GYQE0tW7YoaN13qFh3O1NCY4MPkUiAH3fiF7UcV/I3ajmDKEdG3l+UOcbAm4zUE3gnvUU+Eni7XrVKo9eO9auw== - dependencies: - "@babel/helper-hoist-variables" "^7.24.7" - "@babel/helper-module-transforms" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-validator-identifier" "^7.24.7" - "@babel/plugin-transform-modules-umd@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.23.3.tgz" @@ -1295,14 +834,6 @@ "@babel/helper-module-transforms" "^7.23.3" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-umd@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.7.tgz#edd9f43ec549099620df7df24e7ba13b5c76efc8" - integrity sha512-3aytQvqJ/h9z4g8AsKPLvD4Zqi2qT+L3j7XoFFu1XBlZWEl2/1kWnhmAbxpLgPrHSY0M6UA02jyTiwUVtiKR6A== - dependencies: - "@babel/helper-module-transforms" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-named-capturing-groups-regex@^7.22.5": version "7.22.5" resolved "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz" @@ -1311,14 +842,6 @@ "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-named-capturing-groups-regex@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.7.tgz#9042e9b856bc6b3688c0c2e4060e9e10b1460923" - integrity 
sha512-/jr7h/EWeJtk1U/uz2jlsCioHkZk1JJZVcc8oQsJ1dUlaJD83f4/6Zeh2aHt9BIFokHIsSeDfhUmju0+1GPd6g== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-new-target@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.23.3.tgz" @@ -1326,13 +849,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-new-target@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.7.tgz#31ff54c4e0555cc549d5816e4ab39241dfb6ab00" - integrity sha512-RNKwfRIXg4Ls/8mMTza5oPF5RkOW8Wy/WgMAp1/F1yZ8mMbtwXW+HDoJiOsagWrAhI5f57Vncrmr9XeT4CVapA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-nullish-coalescing-operator@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.23.4.tgz" @@ -1341,14 +857,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" -"@babel/plugin-transform-nullish-coalescing-operator@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.7.tgz#1de4534c590af9596f53d67f52a92f12db984120" - integrity sha512-Ts7xQVk1OEocqzm8rHMXHlxvsfZ0cEF2yomUqpKENHWMF4zKk175Y4q8H5knJes6PgYad50uuRmt3UJuhBw8pQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-transform-numeric-separator@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.23.4.tgz" @@ -1357,14 +865,6 @@ "@babel/helper-plugin-utils" "^7.22.5" 
"@babel/plugin-syntax-numeric-separator" "^7.10.4" -"@babel/plugin-transform-numeric-separator@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.7.tgz#bea62b538c80605d8a0fac9b40f48e97efa7de63" - integrity sha512-e6q1TiVUzvH9KRvicuxdBTUj4AdKSRwzIyFFnfnezpCfP2/7Qmbb8qbU2j7GODbl4JMkblitCQjKYUaX/qkkwA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-transform-object-rest-spread@^7.24.0": version "7.24.0" resolved "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.0.tgz" @@ -1376,16 +876,6 @@ "@babel/plugin-syntax-object-rest-spread" "^7.8.3" "@babel/plugin-transform-parameters" "^7.23.3" -"@babel/plugin-transform-object-rest-spread@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.7.tgz#d13a2b93435aeb8a197e115221cab266ba6e55d6" - integrity sha512-4QrHAr0aXQCEFni2q4DqKLD31n2DL+RxcwnNjDFkSG0eNQ/xCavnRkfCUjsyqGC2OviNJvZOF/mQqZBw7i2C5Q== - dependencies: - "@babel/helper-compilation-targets" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.24.7" - "@babel/plugin-transform-object-super@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.23.3.tgz" @@ -1394,14 +884,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-replace-supers" "^7.22.20" -"@babel/plugin-transform-object-super@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.7.tgz#66eeaff7830bba945dd8989b632a40c04ed625be" - integrity 
sha512-A/vVLwN6lBrMFmMDmPPz0jnE6ZGx7Jq7d6sT/Ev4H65RER6pZ+kczlf1DthF5N0qaPHBsI7UXiE8Zy66nmAovg== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-replace-supers" "^7.24.7" - "@babel/plugin-transform-optional-catch-binding@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.23.4.tgz" @@ -1410,14 +892,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" -"@babel/plugin-transform-optional-catch-binding@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.7.tgz#00eabd883d0dd6a60c1c557548785919b6e717b4" - integrity sha512-uLEndKqP5BfBbC/5jTwPxLh9kqPWWgzN/f8w6UwAIirAEqiIVJWWY312X72Eub09g5KF9+Zn7+hT7sDxmhRuKA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-transform-optional-chaining@^7.23.3", "@babel/plugin-transform-optional-chaining@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.23.4.tgz" @@ -1427,15 +901,6 @@ "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" "@babel/plugin-syntax-optional-chaining" "^7.8.3" -"@babel/plugin-transform-optional-chaining@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.7.tgz#b8f6848a80cf2da98a8a204429bec04756c6d454" - integrity sha512-tK+0N9yd4j+x/4hxF3F0e0fu/VdcxU18y5SevtyM/PCFlQvXbR0Zmlo2eBrKtVipGNFzpq56o8WsIIKcJFUCRQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-transform-parameters@^7.23.3": version "7.23.3" resolved 
"https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.23.3.tgz" @@ -1443,13 +908,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-parameters@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.7.tgz#5881f0ae21018400e320fc7eb817e529d1254b68" - integrity sha512-yGWW5Rr+sQOhK0Ot8hjDJuxU3XLRQGflvT4lhlSY0DFvdb3TwKaY26CJzHtYllU0vT9j58hc37ndFPsqT1SrzA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-private-methods@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.23.3.tgz" @@ -1458,14 +916,6 @@ "@babel/helper-create-class-features-plugin" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-private-methods@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.7.tgz#e6318746b2ae70a59d023d5cc1344a2ba7a75f5e" - integrity sha512-COTCOkG2hn4JKGEKBADkA8WNb35TGkkRbI5iT845dB+NyqgO8Hn+ajPbSnIQznneJTa3d30scb6iz/DhH8GsJQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-private-property-in-object@^7.23.4": version "7.23.4" resolved "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.23.4.tgz" @@ -1476,16 +926,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-private-property-in-object" "^7.14.5" -"@babel/plugin-transform-private-property-in-object@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.7.tgz#4eec6bc701288c1fab5f72e6a4bbc9d67faca061" - integrity 
sha512-9z76mxwnwFxMyxZWEgdgECQglF2Q7cFLm0kMf8pGwt+GSJsY0cONKj/UuO4bOH0w/uAel3ekS4ra5CEAyJRmDA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.24.7" - "@babel/helper-create-class-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-transform-property-literals@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.23.3.tgz" @@ -1493,16 +933,9 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-property-literals@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.7.tgz#f0d2ed8380dfbed949c42d4d790266525d63bbdc" - integrity sha512-EMi4MLQSHfd2nrCqQEWxFdha2gBCqU4ZcCng4WBGZ5CJL4bBRW0ptdqqDdeirGZcpALazVVNJqRmsO8/+oNCBA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-react-constant-elements@^7.21.3": version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.24.7.tgz#b85e8f240b14400277f106c9c9b585d9acf608a1" + resolved "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.24.7.tgz" integrity sha512-7LidzZfUXyfZ8/buRW6qIIHBY8wAZ1OrY9c/wTr8YhZ6vMPo+Uc/CVFLYY1spZrEQlD4w5u8wjqk5NQ3OVqQKA== dependencies: "@babel/helper-plugin-utils" "^7.24.7" @@ -1548,14 +981,6 @@ "@babel/helper-plugin-utils" "^7.22.5" regenerator-transform "^0.15.2" -"@babel/plugin-transform-regenerator@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.7.tgz#021562de4534d8b4b1851759fd7af4e05d2c47f8" - integrity sha512-lq3fvXPdimDrlg6LWBoqj+r/DEWgONuwjuOuQCSYgRroXDH/IdM1C0IZf59fL5cHLpjEH/O6opIRBbqv7ELnuA== - 
dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - regenerator-transform "^0.15.2" - "@babel/plugin-transform-reserved-words@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.23.3.tgz" @@ -1563,13 +988,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-reserved-words@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.7.tgz#80037fe4fbf031fc1125022178ff3938bb3743a4" - integrity sha512-0DUq0pHcPKbjFZCfTss/pGkYMfy3vFWydkUBd9r0GHpIyfs2eCDENvqadMycRS9wZCXR41wucAfJHJmwA0UmoQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-runtime@^7.22.9": version "7.24.0" resolved "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.24.0.tgz" @@ -1589,13 +1007,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-shorthand-properties@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.7.tgz#85448c6b996e122fa9e289746140aaa99da64e73" - integrity sha512-KsDsevZMDsigzbA09+vacnLpmPH4aWjcZjXdyFKGzpplxhbeB4wYtury3vglQkg6KM/xEPKt73eCjPPf1PgXBA== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-spread@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.23.3.tgz" @@ -1604,14 +1015,6 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" -"@babel/plugin-transform-spread@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.7.tgz#e8a38c0fde7882e0fb8f160378f74bd885cc7bb3" - integrity 
sha512-x96oO0I09dgMDxJaANcRyD4ellXFLLiWhuwDxKZX5g2rWP1bTPkBSwCYv96VDXVT1bD9aPj8tppr5ITIh8hBng== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" - "@babel/plugin-transform-sticky-regex@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.23.3.tgz" @@ -1619,13 +1022,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-sticky-regex@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.7.tgz#96ae80d7a7e5251f657b5cf18f1ea6bf926f5feb" - integrity sha512-kHPSIJc9v24zEml5geKg9Mjx5ULpfncj0wRpYtxbvKyTtHCYDkVE3aHQ03FrpEo4gEe2vrJJS1Y9CJTaThA52g== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-template-literals@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.23.3.tgz" @@ -1633,13 +1029,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-template-literals@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.7.tgz#a05debb4a9072ae8f985bcf77f3f215434c8f8c8" - integrity sha512-AfDTQmClklHCOLxtGoP7HkeMw56k1/bTQjwsfhL6pppo/M4TOBSq+jjBUBLmV/4oeFg4GWMavIl44ZeCtmmZTw== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-typeof-symbol@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.23.3.tgz" @@ -1647,13 +1036,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-typeof-symbol@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.7.tgz#f074be466580d47d6e6b27473a840c9f9ca08fb0" - integrity sha512-VtR8hDy7YLB7+Pet9IarXjg/zgCMSF+1mNS/EQEiEaUPoFXCVsHG64SIxcaaI2zJgRiv+YmgaQESUfWAdbjzgg== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-typescript@^7.23.3": version "7.23.6" resolved "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.23.6.tgz" @@ -1661,18 +1043,8 @@ dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" "@babel/helper-create-class-features-plugin" "^7.23.6" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-typescript" "^7.23.3" - -"@babel/plugin-transform-typescript@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.24.7.tgz#b006b3e0094bf0813d505e0c5485679eeaf4a881" - integrity sha512-iLD3UNkgx2n/HrjBesVbYX6j0yqn/sJktvbtKKgcaLIQ4bTTQ8obAypc1VpyHPD2y4Phh9zHOaAt8e/L14wCpw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.24.7" - "@babel/helper-create-class-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-syntax-typescript" "^7.24.7" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-typescript" "^7.23.3" "@babel/plugin-transform-unicode-escapes@^7.23.3": version "7.23.3" @@ -1681,13 +1053,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-unicode-escapes@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.7.tgz#2023a82ced1fb4971630a2e079764502c4148e0e" - integrity sha512-U3ap1gm5+4edc2Q/P+9VrBNhGkfnf+8ZqppY71Bo/pzZmXhhLdqgaUl6cuB07O1+AQJtCLfaOmswiNbSQ9ivhw== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-unicode-property-regex@^7.23.3": version "7.23.3" resolved 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.23.3.tgz" @@ -1696,14 +1061,6 @@ "@babel/helper-create-regexp-features-plugin" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-unicode-property-regex@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.7.tgz#9073a4cd13b86ea71c3264659590ac086605bbcd" - integrity sha512-uH2O4OV5M9FZYQrwc7NdVmMxQJOCCzFeYudlZSzUAHRFeOujQefa92E74TQDVskNHCzOXoigEuoyzHDhaEaK5w== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-unicode-regex@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.23.3.tgz" @@ -1712,14 +1069,6 @@ "@babel/helper-create-regexp-features-plugin" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-unicode-regex@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.7.tgz#dfc3d4a51127108099b19817c0963be6a2adf19f" - integrity sha512-hlQ96MBZSAXUq7ltkjtu3FJCCSMx/j629ns3hA3pXnBXjanNP0LHi+JpPeA81zaWgVK1VGH95Xuy7u0RyQ8kMg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/plugin-transform-unicode-sets-regex@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.23.3.tgz" @@ -1728,102 +1077,7 @@ "@babel/helper-create-regexp-features-plugin" "^7.22.15" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-unicode-sets-regex@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.7.tgz#d40705d67523803a576e29c63cef6e516b858ed9" - integrity sha512-2G8aAvF4wy1w/AGZkemprdGMRg5o6zPNhbHVImRz3lss55TYCBd6xStN19rt8XJHq20sqV0JbyWjOWwQRwV/wg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - -"@babel/preset-env@^7.20.2": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.24.7.tgz#ff067b4e30ba4a72f225f12f123173e77b987f37" - integrity sha512-1YZNsc+y6cTvWlDHidMBsQZrZfEFjRIo/BZCT906PMdzOyXtSLTgqGdrpcuTDCXyd11Am5uQULtDIcCfnTc8fQ== - dependencies: - "@babel/compat-data" "^7.24.7" - "@babel/helper-compilation-targets" "^7.24.7" - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-validator-option" "^7.24.7" - "@babel/plugin-bugfix-firefox-class-in-computed-class-key" "^7.24.7" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.24.7" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.24.7" - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly" "^7.24.7" - "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-import-assertions" "^7.24.7" - "@babel/plugin-syntax-import-attributes" "^7.24.7" - "@babel/plugin-syntax-import-meta" "^7.10.4" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - 
"@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" - "@babel/plugin-transform-arrow-functions" "^7.24.7" - "@babel/plugin-transform-async-generator-functions" "^7.24.7" - "@babel/plugin-transform-async-to-generator" "^7.24.7" - "@babel/plugin-transform-block-scoped-functions" "^7.24.7" - "@babel/plugin-transform-block-scoping" "^7.24.7" - "@babel/plugin-transform-class-properties" "^7.24.7" - "@babel/plugin-transform-class-static-block" "^7.24.7" - "@babel/plugin-transform-classes" "^7.24.7" - "@babel/plugin-transform-computed-properties" "^7.24.7" - "@babel/plugin-transform-destructuring" "^7.24.7" - "@babel/plugin-transform-dotall-regex" "^7.24.7" - "@babel/plugin-transform-duplicate-keys" "^7.24.7" - "@babel/plugin-transform-dynamic-import" "^7.24.7" - "@babel/plugin-transform-exponentiation-operator" "^7.24.7" - "@babel/plugin-transform-export-namespace-from" "^7.24.7" - "@babel/plugin-transform-for-of" "^7.24.7" - "@babel/plugin-transform-function-name" "^7.24.7" - "@babel/plugin-transform-json-strings" "^7.24.7" - "@babel/plugin-transform-literals" "^7.24.7" - "@babel/plugin-transform-logical-assignment-operators" "^7.24.7" - "@babel/plugin-transform-member-expression-literals" "^7.24.7" - "@babel/plugin-transform-modules-amd" "^7.24.7" - "@babel/plugin-transform-modules-commonjs" "^7.24.7" - "@babel/plugin-transform-modules-systemjs" "^7.24.7" - "@babel/plugin-transform-modules-umd" "^7.24.7" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.24.7" - "@babel/plugin-transform-new-target" "^7.24.7" - "@babel/plugin-transform-nullish-coalescing-operator" "^7.24.7" - "@babel/plugin-transform-numeric-separator" "^7.24.7" - "@babel/plugin-transform-object-rest-spread" "^7.24.7" - "@babel/plugin-transform-object-super" "^7.24.7" - 
"@babel/plugin-transform-optional-catch-binding" "^7.24.7" - "@babel/plugin-transform-optional-chaining" "^7.24.7" - "@babel/plugin-transform-parameters" "^7.24.7" - "@babel/plugin-transform-private-methods" "^7.24.7" - "@babel/plugin-transform-private-property-in-object" "^7.24.7" - "@babel/plugin-transform-property-literals" "^7.24.7" - "@babel/plugin-transform-regenerator" "^7.24.7" - "@babel/plugin-transform-reserved-words" "^7.24.7" - "@babel/plugin-transform-shorthand-properties" "^7.24.7" - "@babel/plugin-transform-spread" "^7.24.7" - "@babel/plugin-transform-sticky-regex" "^7.24.7" - "@babel/plugin-transform-template-literals" "^7.24.7" - "@babel/plugin-transform-typeof-symbol" "^7.24.7" - "@babel/plugin-transform-unicode-escapes" "^7.24.7" - "@babel/plugin-transform-unicode-property-regex" "^7.24.7" - "@babel/plugin-transform-unicode-regex" "^7.24.7" - "@babel/plugin-transform-unicode-sets-regex" "^7.24.7" - "@babel/preset-modules" "0.1.6-no-external-plugins" - babel-plugin-polyfill-corejs2 "^0.4.10" - babel-plugin-polyfill-corejs3 "^0.10.4" - babel-plugin-polyfill-regenerator "^0.6.1" - core-js-compat "^3.31.0" - semver "^6.3.1" - -"@babel/preset-env@^7.22.9": +"@babel/preset-env@^7.20.2", "@babel/preset-env@^7.22.9": version "7.24.0" resolved "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.0.tgz" integrity sha512-ZxPEzV9IgvGn73iK0E6VB9/95Nd7aMFpbE0l8KQFDG70cOV9IxRP7Y2FUPmlK0v6ImlLqYX50iuZ3ZTVhOF2lA== @@ -1930,18 +1184,7 @@ "@babel/plugin-transform-react-jsx-development" "^7.22.5" "@babel/plugin-transform-react-pure-annotations" "^7.23.3" -"@babel/preset-typescript@^7.21.0": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.24.7.tgz#66cd86ea8f8c014855671d5ea9a737139cbbfef1" - integrity sha512-SyXRe3OdWwIwalxDg5UtJnJQO+YPcTfwiIY2B0Xlddh9o7jpWLvv8X1RthIeDOxQ+O1ML5BLPCONToObyVQVuQ== - dependencies: - "@babel/helper-plugin-utils" "^7.24.7" - "@babel/helper-validator-option" "^7.24.7" - 
"@babel/plugin-syntax-jsx" "^7.24.7" - "@babel/plugin-transform-modules-commonjs" "^7.24.7" - "@babel/plugin-transform-typescript" "^7.24.7" - -"@babel/preset-typescript@^7.22.5": +"@babel/preset-typescript@^7.21.0", "@babel/preset-typescript@^7.22.5": version "7.23.3" resolved "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.23.3.tgz" integrity sha512-17oIGVlqz6CchO9RFYn5U6ZpWRZIngayYCtrPRSgANSwC2V1Jb+iP74nVxzzXJte8b8BYxrL1yY96xfhTBrNNQ== @@ -1981,15 +1224,6 @@ "@babel/parser" "^7.24.0" "@babel/types" "^7.24.0" -"@babel/template@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.24.7.tgz#02efcee317d0609d2c07117cb70ef8fb17ab7315" - integrity sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig== - dependencies: - "@babel/code-frame" "^7.24.7" - "@babel/parser" "^7.24.7" - "@babel/types" "^7.24.7" - "@babel/traverse@^7.22.8", "@babel/traverse@^7.24.0": version "7.24.0" resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.0.tgz" @@ -2006,32 +1240,7 @@ debug "^4.3.1" globals "^11.1.0" -"@babel/traverse@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.24.7.tgz#de2b900163fa741721ba382163fe46a936c40cf5" - integrity sha512-yb65Ed5S/QAcewNPh0nZczy9JdYXkkAbIsEo+P7BE7yO3txAY30Y/oPa3QkQ5It3xVG2kpKMg9MsdxZaO31uKA== - dependencies: - "@babel/code-frame" "^7.24.7" - "@babel/generator" "^7.24.7" - "@babel/helper-environment-visitor" "^7.24.7" - "@babel/helper-function-name" "^7.24.7" - "@babel/helper-hoist-variables" "^7.24.7" - "@babel/helper-split-export-declaration" "^7.24.7" - "@babel/parser" "^7.24.7" - "@babel/types" "^7.24.7" - debug "^4.3.1" - globals "^11.1.0" - -"@babel/types@^7.21.3", "@babel/types@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.24.7.tgz#6027fe12bc1aa724cd32ab113fb7f1988f1f66f2" - integrity 
sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q== - dependencies: - "@babel/helper-string-parser" "^7.24.7" - "@babel/helper-validator-identifier" "^7.24.7" - to-fast-properties "^2.0.0" - -"@babel/types@^7.22.15", "@babel/types@^7.22.19", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.4", "@babel/types@^7.23.6", "@babel/types@^7.24.0", "@babel/types@^7.4.4": +"@babel/types@^7.21.3", "@babel/types@^7.22.15", "@babel/types@^7.22.19", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.4", "@babel/types@^7.23.6", "@babel/types@^7.24.0", "@babel/types@^7.4.4": version "7.24.0" resolved "https://registry.npmjs.org/@babel/types/-/types-7.24.0.tgz" integrity sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w== @@ -2055,25 +1264,25 @@ resolved "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz" integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== -"@docsearch/css@3.6.0": - version "3.6.0" - resolved "https://registry.npmjs.org/@docsearch/css/-/css-3.6.0.tgz" - integrity sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ== +"@docsearch/css@3.6.1": + version "3.6.1" + resolved "https://registry.npmjs.org/@docsearch/css/-/css-3.6.1.tgz" + integrity sha512-VtVb5DS+0hRIprU2CO6ZQjK2Zg4QU5HrDM1+ix6rT0umsYvFvatMAnf97NHZlVWDaaLlx7GRfR/7FikANiM2Fg== "@docsearch/react@^3.5.2": - version "3.6.0" - resolved "https://registry.npmjs.org/@docsearch/react/-/react-3.6.0.tgz" - integrity sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w== + version "3.6.1" + resolved "https://registry.npmjs.org/@docsearch/react/-/react-3.6.1.tgz" + integrity sha512-qXZkEPvybVhSXj0K7U3bXc233tk5e8PfhoZ6MhPOiik/qUQxYC+Dn9DnoS7CxHQQhHfCvTiN0eY9M12oRghEXw== dependencies: "@algolia/autocomplete-core" "1.9.3" 
"@algolia/autocomplete-preset-algolia" "1.9.3" - "@docsearch/css" "3.6.0" + "@docsearch/css" "3.6.1" algoliasearch "^4.19.1" -"@docusaurus/core@3.4.0", "@docusaurus/core@^3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-3.4.0.tgz#bdbf1af4b2f25d1bf4a5b62ec6137d84c821cb3c" - integrity sha512-g+0wwmN2UJsBqy2fQRQ6fhXruoEa62JDeEa5d8IdTJlMoaDaEDfHh7WjwGRn4opuTQWpjAwP/fbcgyHKlE+64w== +"@docusaurus/core@^3.4.0", "@docusaurus/core@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/core/-/core-3.5.2.tgz" + integrity sha512-4Z1WkhCSkX4KO0Fw5m/Vuc7Q3NxBG53NE5u59Rs96fWkMPZVSrzEPP16/Nk6cWb/shK7xXPndTmalJtw7twL/w== dependencies: "@babel/core" "^7.23.3" "@babel/generator" "^7.23.3" @@ -2085,12 +1294,12 @@ "@babel/runtime" "^7.22.6" "@babel/runtime-corejs3" "^7.22.6" "@babel/traverse" "^7.22.8" - "@docusaurus/cssnano-preset" "3.4.0" - "@docusaurus/logger" "3.4.0" - "@docusaurus/mdx-loader" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-common" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + "@docusaurus/cssnano-preset" "3.5.2" + "@docusaurus/logger" "3.5.2" + "@docusaurus/mdx-loader" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-common" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" autoprefixer "^10.4.14" babel-loader "^9.1.3" babel-plugin-dynamic-import-node "^2.3.3" @@ -2144,32 +1353,32 @@ webpack-merge "^5.9.0" webpackbar "^5.0.2" -"@docusaurus/cssnano-preset@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-3.4.0.tgz#dc7922b3bbeabcefc9b60d0161680d81cf72c368" - integrity sha512-qwLFSz6v/pZHy/UP32IrprmH5ORce86BGtN0eBtG75PpzQJAzp9gefspox+s8IEOr0oZKuQ/nhzZ3xwyc3jYJQ== +"@docusaurus/cssnano-preset@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.5.2.tgz" + integrity sha512-D3KiQXOMA8+O0tqORBrTOEQyQxNIfPm9jEaJoALjjSjc2M/ZAWcUfPQEnwr2JB2TadHw2gqWgpZckQmrVWkytA== 
dependencies: cssnano-preset-advanced "^6.1.2" postcss "^8.4.38" postcss-sort-media-queries "^5.2.0" tslib "^2.6.0" -"@docusaurus/logger@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/logger/-/logger-3.4.0.tgz#8b0ac05c7f3dac2009066e2f964dee8209a77403" - integrity sha512-bZwkX+9SJ8lB9kVRkXw+xvHYSMGG4bpYHKGXeXFvyVc79NMeeBSGgzd4TQLHH+DYeOJoCdl8flrFJVxlZ0wo/Q== +"@docusaurus/logger@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.5.2.tgz" + integrity sha512-LHC540SGkeLfyT3RHK3gAMK6aS5TRqOD4R72BEU/DE2M/TY8WwEUAMY576UUc/oNJXv8pGhBmQB6N9p3pt8LQw== dependencies: chalk "^4.1.2" tslib "^2.6.0" -"@docusaurus/mdx-loader@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-3.4.0.tgz#483d7ab57928fdbb5c8bd1678098721a930fc5f6" - integrity sha512-kSSbrrk4nTjf4d+wtBA9H+FGauf2gCax89kV8SUSJu3qaTdSIKdWERlngsiHaCFgZ7laTJ8a67UFf+xlFPtuTw== +"@docusaurus/mdx-loader@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.5.2.tgz" + integrity sha512-ku3xO9vZdwpiMIVd8BzWV0DCqGEbCP5zs1iHfKX50vw6jX8vQo0ylYo1YJMZyz6e+JFJ17HYHT5FzVidz2IflA== dependencies: - "@docusaurus/logger" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + "@docusaurus/logger" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" "@mdx-js/mdx" "^3.0.0" "@slorber/remark-comment" "^1.0.0" escape-html "^1.0.3" @@ -2192,12 +1401,12 @@ vfile "^6.0.1" webpack "^5.88.1" -"@docusaurus/module-type-aliases@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/module-type-aliases/-/module-type-aliases-3.4.0.tgz#2653bde58fc1aa3dbc626a6c08cfb63a37ae1bb8" - integrity sha512-A1AyS8WF5Bkjnb8s+guTDuYmUiwJzNrtchebBHpc0gz0PyHJNMaybUlSrmJjHVcGrya0LKI4YcR3lBDQfXRYLw== +"@docusaurus/module-type-aliases@3.5.2": + version "3.5.2" + resolved 
"https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.5.2.tgz" + integrity sha512-Z+Xu3+2rvKef/YKTMxZHsEXp1y92ac0ngjDiExRdqGTmEKtCUpkbNYH8v5eXo5Ls+dnW88n6WTa+Q54kLOkwPg== dependencies: - "@docusaurus/types" "3.4.0" + "@docusaurus/types" "3.5.2" "@types/history" "^4.7.11" "@types/react" "*" "@types/react-router-config" "*" @@ -2205,19 +1414,20 @@ react-helmet-async "*" react-loadable "npm:@docusaurus/react-loadable@6.0.0" -"@docusaurus/plugin-content-blog@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.4.0.tgz#6373632fdbababbda73a13c4a08f907d7de8f007" - integrity sha512-vv6ZAj78ibR5Jh7XBUT4ndIjmlAxkijM3Sx5MAAzC1gyv0vupDQNhzuFg1USQmQVj3P5I6bquk12etPV3LJ+Xw== - dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/logger" "3.4.0" - "@docusaurus/mdx-loader" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-common" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" - cheerio "^1.0.0-rc.12" +"@docusaurus/plugin-content-blog@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.5.2.tgz" + integrity sha512-R7ghWnMvjSf+aeNDH0K4fjyQnt5L0KzUEnUhmf1e3jZrv3wogeytZNN6n7X8yHcMsuZHPOrctQhXWnmxu+IRRg== + dependencies: + "@docusaurus/core" "3.5.2" + "@docusaurus/logger" "3.5.2" + "@docusaurus/mdx-loader" "3.5.2" + "@docusaurus/theme-common" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-common" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" + cheerio "1.0.0-rc.12" feed "^4.2.2" fs-extra "^11.1.1" lodash "^4.17.21" @@ -2228,19 +1438,20 @@ utility-types "^3.10.0" webpack "^5.88.1" -"@docusaurus/plugin-content-docs@3.4.0", "@docusaurus/plugin-content-docs@^2 || ^3": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.4.0.tgz#3088973f72169a2a6d533afccec7153c8720d332" - 
integrity sha512-HkUCZffhBo7ocYheD9oZvMcDloRnGhBMOZRyVcAQRFmZPmNqSyISlXA1tQCIxW+r478fty97XXAGjNYzBjpCsg== - dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/logger" "3.4.0" - "@docusaurus/mdx-loader" "3.4.0" - "@docusaurus/module-type-aliases" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-common" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" +"@docusaurus/plugin-content-docs@*", "@docusaurus/plugin-content-docs@^2 || ^3", "@docusaurus/plugin-content-docs@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.5.2.tgz" + integrity sha512-Bt+OXn/CPtVqM3Di44vHjE7rPCEsRCB/DMo2qoOuozB9f7+lsdrHvD0QCHdBs0uhz6deYJDppAr2VgqybKPlVQ== + dependencies: + "@docusaurus/core" "3.5.2" + "@docusaurus/logger" "3.5.2" + "@docusaurus/mdx-loader" "3.5.2" + "@docusaurus/module-type-aliases" "3.5.2" + "@docusaurus/theme-common" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-common" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" "@types/react-router-config" "^5.0.7" combine-promises "^1.1.0" fs-extra "^11.1.1" @@ -2250,118 +1461,118 @@ utility-types "^3.10.0" webpack "^5.88.1" -"@docusaurus/plugin-content-pages@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.4.0.tgz#1846172ca0355c7d32a67ef8377750ce02bbb8ad" - integrity sha512-h2+VN/0JjpR8fIkDEAoadNjfR3oLzB+v1qSXbIAKjQ46JAHx3X22n9nqS+BWSQnTnp1AjkjSvZyJMekmcwxzxg== - dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/mdx-loader" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" +"@docusaurus/plugin-content-pages@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.5.2.tgz" + integrity 
sha512-WzhHjNpoQAUz/ueO10cnundRz+VUtkjFhhaQ9jApyv1a46FPURO4cef89pyNIOMny1fjDz/NUN2z6Yi+5WUrCw== + dependencies: + "@docusaurus/core" "3.5.2" + "@docusaurus/mdx-loader" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" fs-extra "^11.1.1" tslib "^2.6.0" webpack "^5.88.1" -"@docusaurus/plugin-debug@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-3.4.0.tgz#74e4ec5686fa314c26f3ac150bacadbba7f06948" - integrity sha512-uV7FDUNXGyDSD3PwUaf5YijX91T5/H9SX4ErEcshzwgzWwBtK37nUWPU3ZLJfeTavX3fycTOqk9TglpOLaWkCg== +"@docusaurus/plugin-debug@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.5.2.tgz" + integrity sha512-kBK6GlN0itCkrmHuCS6aX1wmoWc5wpd5KJlqQ1FyrF0cLDnvsYSnh7+ftdwzt7G6lGBho8lrVwkkL9/iQvaSOA== dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils" "3.4.0" + "@docusaurus/core" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils" "3.5.2" fs-extra "^11.1.1" react-json-view-lite "^1.2.0" tslib "^2.6.0" -"@docusaurus/plugin-google-analytics@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.4.0.tgz#5f59fc25329a59decc231936f6f9fb5663da3c55" - integrity sha512-mCArluxEGi3cmYHqsgpGGt3IyLCrFBxPsxNZ56Mpur0xSlInnIHoeLDH7FvVVcPJRPSQ9/MfRqLsainRw+BojA== +"@docusaurus/plugin-google-analytics@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.5.2.tgz" + integrity sha512-rjEkJH/tJ8OXRE9bwhV2mb/WP93V441rD6XnM6MIluu7rk8qg38iSxS43ga2V2Q/2ib53PcqbDEJDG/yWQRJhQ== dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + "@docusaurus/core" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" tslib "^2.6.0" 
-"@docusaurus/plugin-google-gtag@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.4.0.tgz#42489ac5fe1c83b5523ceedd5ef74f9aa8bc251b" - integrity sha512-Dsgg6PLAqzZw5wZ4QjUYc8Z2KqJqXxHxq3vIoyoBWiLEEfigIs7wHR+oiWUQy3Zk9MIk6JTYj7tMoQU0Jm3nqA== +"@docusaurus/plugin-google-gtag@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.5.2.tgz" + integrity sha512-lm8XL3xLkTPHFKKjLjEEAHUrW0SZBSHBE1I+i/tmYMBsjCcUB5UJ52geS5PSiOCFVR74tbPGcPHEV/gaaxFeSA== dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + "@docusaurus/core" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" "@types/gtag.js" "^0.0.12" tslib "^2.6.0" -"@docusaurus/plugin-google-tag-manager@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.4.0.tgz#cebb03a5ffa1e70b37d95601442babea251329ff" - integrity sha512-O9tX1BTwxIhgXpOLpFDueYA9DWk69WCbDRrjYoMQtFHSkTyE7RhNgyjSPREUWJb9i+YUg3OrsvrBYRl64FCPCQ== +"@docusaurus/plugin-google-tag-manager@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.5.2.tgz" + integrity sha512-QkpX68PMOMu10Mvgvr5CfZAzZQFx8WLlOiUQ/Qmmcl6mjGK6H21WLT5x7xDmcpCoKA/3CegsqIqBR+nA137lQg== dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + "@docusaurus/core" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" tslib "^2.6.0" -"@docusaurus/plugin-sitemap@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.4.0.tgz#b091d64d1e3c6c872050189999580187537bcbc6" - integrity sha512-+0VDvx9SmNrFNgwPoeoCha+tRoAjopwT0+pYO1xAbyLcewXSemq+eLxEa46Q1/aoOaJQ0qqHELuQM7iS2gp33Q== - 
dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/logger" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-common" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" +"@docusaurus/plugin-sitemap@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.5.2.tgz" + integrity sha512-DnlqYyRAdQ4NHY28TfHuVk414ft2uruP4QWCH//jzpHjqvKyXjj2fmDtI8RPUBh9K8iZKFMHRnLtzJKySPWvFA== + dependencies: + "@docusaurus/core" "3.5.2" + "@docusaurus/logger" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-common" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" fs-extra "^11.1.1" sitemap "^7.1.1" tslib "^2.6.0" "@docusaurus/preset-classic@^3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-3.4.0.tgz#6082a32fbb465b0cb2c2a50ebfc277cff2c0f139" - integrity sha512-Ohj6KB7siKqZaQhNJVMBBUzT3Nnp6eTKqO+FXO3qu/n1hJl3YLwVKTWBg28LF7MWrKu46UuYavwMRxud0VyqHg== - dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/plugin-content-blog" "3.4.0" - "@docusaurus/plugin-content-docs" "3.4.0" - "@docusaurus/plugin-content-pages" "3.4.0" - "@docusaurus/plugin-debug" "3.4.0" - "@docusaurus/plugin-google-analytics" "3.4.0" - "@docusaurus/plugin-google-gtag" "3.4.0" - "@docusaurus/plugin-google-tag-manager" "3.4.0" - "@docusaurus/plugin-sitemap" "3.4.0" - "@docusaurus/theme-classic" "3.4.0" - "@docusaurus/theme-common" "3.4.0" - "@docusaurus/theme-search-algolia" "3.4.0" - "@docusaurus/types" "3.4.0" - -"@docusaurus/theme-classic@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-3.4.0.tgz#1b0f48edec3e3ec8927843554b9f11e5927b0e52" - integrity sha512-0IPtmxsBYv2adr1GnZRdMkEQt1YW6tpzrUPj02YxNpvJ5+ju4E13J5tB4nfdaen/tfR1hmpSPlTFPvTf4kwy8Q== - dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/mdx-loader" "3.4.0" - "@docusaurus/module-type-aliases" 
"3.4.0" - "@docusaurus/plugin-content-blog" "3.4.0" - "@docusaurus/plugin-content-docs" "3.4.0" - "@docusaurus/plugin-content-pages" "3.4.0" - "@docusaurus/theme-common" "3.4.0" - "@docusaurus/theme-translations" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-common" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.5.2.tgz" + integrity sha512-3ihfXQ95aOHiLB5uCu+9PRy2gZCeSZoDcqpnDvf3B+sTrMvMTr8qRUzBvWkoIqc82yG5prCboRjk1SVILKx6sg== + dependencies: + "@docusaurus/core" "3.5.2" + "@docusaurus/plugin-content-blog" "3.5.2" + "@docusaurus/plugin-content-docs" "3.5.2" + "@docusaurus/plugin-content-pages" "3.5.2" + "@docusaurus/plugin-debug" "3.5.2" + "@docusaurus/plugin-google-analytics" "3.5.2" + "@docusaurus/plugin-google-gtag" "3.5.2" + "@docusaurus/plugin-google-tag-manager" "3.5.2" + "@docusaurus/plugin-sitemap" "3.5.2" + "@docusaurus/theme-classic" "3.5.2" + "@docusaurus/theme-common" "3.5.2" + "@docusaurus/theme-search-algolia" "3.5.2" + "@docusaurus/types" "3.5.2" + +"@docusaurus/theme-classic@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.5.2.tgz" + integrity sha512-XRpinSix3NBv95Rk7xeMF9k4safMkwnpSgThn0UNQNumKvmcIYjfkwfh2BhwYh/BxMXQHJ/PdmNh22TQFpIaYg== + dependencies: + "@docusaurus/core" "3.5.2" + "@docusaurus/mdx-loader" "3.5.2" + "@docusaurus/module-type-aliases" "3.5.2" + "@docusaurus/plugin-content-blog" "3.5.2" + "@docusaurus/plugin-content-docs" "3.5.2" + "@docusaurus/plugin-content-pages" "3.5.2" + "@docusaurus/theme-common" "3.5.2" + "@docusaurus/theme-translations" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-common" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" "@mdx-js/react" "^3.0.0" clsx "^2.0.0" copy-text-to-clipboard "^3.2.0" - infima "0.2.0-alpha.43" + infima "0.2.0-alpha.44" lodash "^4.17.21" 
nprogress "^0.2.0" postcss "^8.4.26" @@ -2372,18 +1583,15 @@ tslib "^2.6.0" utility-types "^3.10.0" -"@docusaurus/theme-common@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-3.4.0.tgz#01f2b728de6cb57f6443f52fc30675cf12a5d49f" - integrity sha512-0A27alXuv7ZdCg28oPE8nH/Iz73/IUejVaCazqu9elS4ypjiLhK3KfzdSQBnL/g7YfHSlymZKdiOHEo8fJ0qMA== - dependencies: - "@docusaurus/mdx-loader" "3.4.0" - "@docusaurus/module-type-aliases" "3.4.0" - "@docusaurus/plugin-content-blog" "3.4.0" - "@docusaurus/plugin-content-docs" "3.4.0" - "@docusaurus/plugin-content-pages" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-common" "3.4.0" +"@docusaurus/theme-common@^2 || ^3", "@docusaurus/theme-common@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.5.2.tgz" + integrity sha512-QXqlm9S6x9Ibwjs7I2yEDgsCocp708DrCrgHgKwg2n2AY0YQ6IjU0gAK35lHRLOvAoJUfCKpQAwUykB0R7+Eew== + dependencies: + "@docusaurus/mdx-loader" "3.5.2" + "@docusaurus/module-type-aliases" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-common" "3.5.2" "@types/history" "^4.7.11" "@types/react" "*" "@types/react-router-config" "*" @@ -2394,31 +1602,31 @@ utility-types "^3.10.0" "@docusaurus/theme-mermaid@^3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-mermaid/-/theme-mermaid-3.4.0.tgz#ef1d2231d0858767f67538b4fafd7d0ce2a3e845" - integrity sha512-3w5QW0HEZ2O6x2w6lU3ZvOe1gNXP2HIoKDMJBil1VmLBc9PmpAG17VmfhI/p3L2etNmOiVs5GgniUqvn8AFEGQ== - dependencies: - "@docusaurus/core" "3.4.0" - "@docusaurus/module-type-aliases" "3.4.0" - "@docusaurus/theme-common" "3.4.0" - "@docusaurus/types" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/theme-mermaid/-/theme-mermaid-3.5.2.tgz" + integrity sha512-7vWCnIe/KoyTN1Dc55FIyqO5hJ3YaV08Mr63Zej0L0mX1iGzt+qKSmeVUAJ9/aOalUhF0typV0RmNUSy5FAmCg== + dependencies: + 
"@docusaurus/core" "3.5.2" + "@docusaurus/module-type-aliases" "3.5.2" + "@docusaurus/theme-common" "3.5.2" + "@docusaurus/types" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" mermaid "^10.4.0" tslib "^2.6.0" -"@docusaurus/theme-search-algolia@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.4.0.tgz#c499bad71d668df0d0f15b0e5e33e2fc4e330fcc" - integrity sha512-aiHFx7OCw4Wck1z6IoShVdUWIjntC8FHCw9c5dR8r3q4Ynh+zkS8y2eFFunN/DL6RXPzpnvKCg3vhLQYJDmT9Q== +"@docusaurus/theme-search-algolia@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.5.2.tgz" + integrity sha512-qW53kp3VzMnEqZGjakaV90sst3iN1o32PH+nawv1uepROO8aEGxptcq2R5rsv7aBShSRbZwIobdvSYKsZ5pqvA== dependencies: "@docsearch/react" "^3.5.2" - "@docusaurus/core" "3.4.0" - "@docusaurus/logger" "3.4.0" - "@docusaurus/plugin-content-docs" "3.4.0" - "@docusaurus/theme-common" "3.4.0" - "@docusaurus/theme-translations" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-validation" "3.4.0" + "@docusaurus/core" "3.5.2" + "@docusaurus/logger" "3.5.2" + "@docusaurus/plugin-content-docs" "3.5.2" + "@docusaurus/theme-common" "3.5.2" + "@docusaurus/theme-translations" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-validation" "3.5.2" algoliasearch "^4.18.0" algoliasearch-helper "^3.13.3" clsx "^2.0.0" @@ -2428,18 +1636,18 @@ tslib "^2.6.0" utility-types "^3.10.0" -"@docusaurus/theme-translations@3.4.0", "@docusaurus/theme-translations@^2 || ^3": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-translations/-/theme-translations-3.4.0.tgz#e6355d01352886c67e38e848b2542582ea3070af" - integrity sha512-zSxCSpmQCCdQU5Q4CnX/ID8CSUUI3fvmq4hU/GNP/XoAWtXo9SAVnM3TzpU8Gb//H3WCsT8mJcTfyOk3d9ftNg== +"@docusaurus/theme-translations@^2 || ^3", "@docusaurus/theme-translations@3.5.2": + version "3.5.2" + resolved 
"https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.5.2.tgz" + integrity sha512-GPZLcu4aT1EmqSTmbdpVrDENGR2yObFEX8ssEFYTCiAIVc0EihNSdOIBTazUvgNqwvnoU1A8vIs1xyzc3LITTw== dependencies: fs-extra "^11.1.1" tslib "^2.6.0" -"@docusaurus/types@3.4.0": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-3.4.0.tgz#237c3f737e9db3f7c1a5935a3ef48d6eadde8292" - integrity sha512-4jcDO8kXi5Cf9TcyikB/yKmz14f2RZ2qTRerbHAsS+5InE9ZgSLBNLsewtFTcTOXSVcbU3FoGOzcNWAmU1TR0A== +"@docusaurus/types@*", "@docusaurus/types@^3.0.0", "@docusaurus/types@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/types/-/types-3.5.2.tgz" + integrity sha512-N6GntLXoLVUwkZw7zCxwy9QiuEXIcTVzA9AkmNw16oc0AP3SXLrMmDMMBIfgqwuKWa6Ox6epHol9kMtJqekACw== dependencies: "@mdx-js/mdx" "^3.0.0" "@types/history" "^4.7.11" @@ -2451,48 +1659,34 @@ webpack "^5.88.1" webpack-merge "^5.9.0" -"@docusaurus/types@^3.0.0": - version "3.0.0" - resolved "https://registry.npmjs.org/@docusaurus/types/-/types-3.0.0.tgz" - integrity sha512-Qb+l/hmCOVemReuzvvcFdk84bUmUFyD0Zi81y651ie3VwMrXqC7C0E7yZLKMOsLj/vkqsxHbtkAuYMI89YzNzg== - dependencies: - "@types/history" "^4.7.11" - "@types/react" "*" - commander "^5.1.0" - joi "^17.9.2" - react-helmet-async "^1.3.0" - utility-types "^3.10.0" - webpack "^5.88.1" - webpack-merge "^5.9.0" - -"@docusaurus/utils-common@3.4.0", "@docusaurus/utils-common@^2 || ^3": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-3.4.0.tgz#2a43fefd35b85ab9fcc6833187e66c15f8bfbbc6" - integrity sha512-NVx54Wr4rCEKsjOH5QEVvxIqVvm+9kh7q8aYTU5WzUU9/Hctd6aTrcZ3G0Id4zYJ+AeaG5K5qHA4CY5Kcm2iyQ== +"@docusaurus/utils-common@^2 || ^3", "@docusaurus/utils-common@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.5.2.tgz" + integrity sha512-i0AZjHiRgJU6d7faQngIhuHKNrszpL/SHQPgF1zH4H+Ij6E9NBYGy6pkcGWToIv7IVPbs+pQLh1P3whn0gWXVg== dependencies: 
tslib "^2.6.0" -"@docusaurus/utils-validation@3.4.0", "@docusaurus/utils-validation@^2 || ^3": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-3.4.0.tgz#0176f6e503ff45f4390ec2ecb69550f55e0b5eb7" - integrity sha512-hYQ9fM+AXYVTWxJOT1EuNaRnrR2WGpRdLDQG07O8UOpsvCPWUVOeo26Rbm0JWY2sGLfzAb+tvJ62yF+8F+TV0g== +"@docusaurus/utils-validation@^2 || ^3", "@docusaurus/utils-validation@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.5.2.tgz" + integrity sha512-m+Foq7augzXqB6HufdS139PFxDC5d5q2QKZy8q0qYYvGdI6nnlNsGH4cIGsgBnV7smz+mopl3g4asbSDvMV0jA== dependencies: - "@docusaurus/logger" "3.4.0" - "@docusaurus/utils" "3.4.0" - "@docusaurus/utils-common" "3.4.0" + "@docusaurus/logger" "3.5.2" + "@docusaurus/utils" "3.5.2" + "@docusaurus/utils-common" "3.5.2" fs-extra "^11.2.0" joi "^17.9.2" js-yaml "^4.1.0" lodash "^4.17.21" tslib "^2.6.0" -"@docusaurus/utils@3.4.0", "@docusaurus/utils@^2 || ^3": - version "3.4.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-3.4.0.tgz#c508e20627b7a55e2b541e4a28c95e0637d6a204" - integrity sha512-fRwnu3L3nnWaXOgs88BVBmG1yGjcQqZNHG+vInhEa2Sz2oQB+ZjbEMO5Rh9ePFpZ0YDiDUhpaVjwmS+AU2F14g== +"@docusaurus/utils@^2 || ^3", "@docusaurus/utils@3.5.2": + version "3.5.2" + resolved "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.5.2.tgz" + integrity sha512-33QvcNFh+Gv+C2dP9Y9xWEzMgf3JzrpL2nW9PopidiohS1nDcyknKRx2DWaFvyVTTYIkkABVSr073VTj/NITNA== dependencies: - "@docusaurus/logger" "3.4.0" - "@docusaurus/utils-common" "3.4.0" + "@docusaurus/logger" "3.5.2" + "@docusaurus/utils-common" "3.5.2" "@svgr/webpack" "^8.1.0" escape-string-regexp "^4.0.0" file-loader "^6.2.0" @@ -2514,16 +1708,16 @@ "@easyops-cn/autocomplete.js@^0.38.1": version "0.38.1" - resolved "https://registry.yarnpkg.com/@easyops-cn/autocomplete.js/-/autocomplete.js-0.38.1.tgz#46dff5795a9a032fa9b9250fdf63ca6c61c07629" + resolved 
"https://registry.npmjs.org/@easyops-cn/autocomplete.js/-/autocomplete.js-0.38.1.tgz" integrity sha512-drg76jS6syilOUmVNkyo1c7ZEBPcPuK+aJA7AksM5ZIIbV57DMHCywiCr+uHyv8BE5jUTU98j/H7gVrkHrWW3Q== dependencies: cssesc "^3.0.0" immediate "^3.2.3" -"@easyops-cn/docusaurus-search-local@^0.44.2": - version "0.44.2" - resolved "https://registry.yarnpkg.com/@easyops-cn/docusaurus-search-local/-/docusaurus-search-local-0.44.2.tgz#580925d8b94220cecbe30c466bdc0b32cb275cf6" - integrity sha512-4tMBU54R1O6ITxkMGwOEifSHNkZLa2fb4ajGc8rd6TYZ0a8+jlu/u/5gYtw1s6sGGMRkwyG+QI6HD0bEnCRa1w== +"@easyops-cn/docusaurus-search-local@^0.44.5": + version "0.44.5" + resolved "https://registry.npmjs.org/@easyops-cn/docusaurus-search-local/-/docusaurus-search-local-0.44.5.tgz" + integrity sha512-jT3wuYVzRoeB1gea+2iDtOMme0fD2h3M8HDVgs3garITO6vRxvEraFRVlYkfjLN9BkmzjMlz9nn7MI4qIx8utw== dependencies: "@docusaurus/plugin-content-docs" "^2 || ^3" "@docusaurus/theme-translations" "^2 || ^3" @@ -2532,7 +1726,7 @@ "@docusaurus/utils-validation" "^2 || ^3" "@easyops-cn/autocomplete.js" "^0.38.1" "@node-rs/jieba" "^1.6.0" - cheerio "^1.0.0-rc.3" + cheerio "^1.0.0" clsx "^1.1.1" debug "^4.2.0" fs-extra "^10.0.0" @@ -2542,28 +1736,6 @@ mark.js "^8.11.1" tslib "^2.4.0" -"@emnapi/core@^1.1.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@emnapi/core/-/core-1.2.0.tgz#7b738e5033738132bf6af0b8fae7b05249bdcbd7" - integrity sha512-E7Vgw78I93we4ZWdYCb4DGAwRROGkMIXk7/y87UmANR+J6qsWusmC3gLt0H+O0KOt5e6O38U8oJamgbudrES/w== - dependencies: - "@emnapi/wasi-threads" "1.0.1" - tslib "^2.4.0" - -"@emnapi/runtime@^1.1.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.2.0.tgz#71d018546c3a91f3b51106530edbc056b9f2f2e3" - integrity sha512-bV21/9LQmcQeCPEg3BDFtvwL6cwiTMksYNWQQ4KOxCZikEGalWtenoZ0wCiukJINlGCIi2KXx01g4FoH/LxpzQ== - dependencies: - tslib "^2.4.0" - -"@emnapi/wasi-threads@1.0.1": - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/@emnapi/wasi-threads/-/wasi-threads-1.0.1.tgz#d7ae71fd2166b1c916c6cd2d0df2ef565a2e1a5b" - integrity sha512-iIBu7mwkq4UQGeMEM8bLwNK962nXdhodeScX4slfQnRhEMMzvYivHhutCIk8uojvmASXXPC2WNEjwxFWk72Oqw== - dependencies: - tslib "^2.4.0" - "@hapi/hoek@^9.0.0", "@hapi/hoek@^9.3.0": version "9.3.0" resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz" @@ -2676,90 +1848,19 @@ dependencies: "@types/mdx" "^2.0.0" -"@napi-rs/wasm-runtime@^0.2.3": - version "0.2.4" - resolved "https://registry.yarnpkg.com/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.4.tgz#d27788176f250d86e498081e3c5ff48a17606918" - integrity sha512-9zESzOO5aDByvhIAsOy9TbpZ0Ur2AJbUI7UT73kcUTS2mxAMHOBaa1st/jAymNoCtvrit99kkzT1FZuXVcgfIQ== - dependencies: - "@emnapi/core" "^1.1.0" - "@emnapi/runtime" "^1.1.0" - "@tybys/wasm-util" "^0.9.0" - -"@node-rs/jieba-android-arm-eabi@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-android-arm-eabi/-/jieba-android-arm-eabi-1.10.3.tgz#821af26a4953b3fbdf2f80a4d08a9d9114b40bea" - integrity sha512-fuqVtaYlUKZg3cqagYFxj1DSa7ZHKXLle4iGH2kbQWg7Kw6cf7aCYBHIUZuH5sliK10M/CWccZ+SGRUwcSGfbg== - -"@node-rs/jieba-android-arm64@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-android-arm64/-/jieba-android-arm64-1.10.3.tgz#e5c285fb8de71739dfa3a83d894adcadb799c404" - integrity sha512-iuZZZq5yD9lT+AgaXpFe19gtAsIecUODRLLaBFbavjgjLk5cumv38ytWjS36s/eqptwI15MQfysSYOlWtMEG5g== - -"@node-rs/jieba-darwin-arm64@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-darwin-arm64/-/jieba-darwin-arm64-1.10.3.tgz#67df85df39ff60dcc3e084f6e36e5182779b69ad" - integrity sha512-dwPhkav1tEARskwPz91UUXL2NXy4h0lJYTuJzpGgwXxm552zBM2JJ41kjah1364j+EOq5At3NQvf5r5rH89phQ== - -"@node-rs/jieba-darwin-x64@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-darwin-x64/-/jieba-darwin-x64-1.10.3.tgz#ffdc8a63335294d7c68d3aebec870ec0824ebe98" - integrity 
sha512-kjxvV6G1baQo/2I3mELv5qGv4Q0rhd5srwXhypSxMWZFtSpNwCDsLcIOR5bvMBci6QVFfZOs6WD6DKiWVz0SlA== - -"@node-rs/jieba-freebsd-x64@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-freebsd-x64/-/jieba-freebsd-x64-1.10.3.tgz#188349a9074b200af4a3e8a0ea169f45efd6c162" - integrity sha512-QYTsn+zlWRil+MuBeLfTK5Md4GluOf2lHnFqjrOZW2oMgNOvxB3qoLV4TUf70S/E2XHeP6PUdjCKItX8C7GQPg== - -"@node-rs/jieba-linux-arm-gnueabihf@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-arm-gnueabihf/-/jieba-linux-arm-gnueabihf-1.10.3.tgz#e1831b7b08a32904b12860555978c50222a97b54" - integrity sha512-UFB43kDOvqmbRl99e3GPwaTuwJZaAvgLaMTvBkmxww4MpQH6G1k31RLzMW/S21uSQso2lj6W/Mm59gaJk2FiyA== - -"@node-rs/jieba-linux-arm64-gnu@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-arm64-gnu/-/jieba-linux-arm64-gnu-1.10.3.tgz#326712eb7418f9796b113af93afe59ab64c37add" - integrity sha512-bu++yWi10wZtnS5uLcwxzxKmHVT77NgQMK8JiQr1TWCl3Y1Th7CnEHQtxfVB489edDK8l644h1/4zSTe5fRnOQ== - -"@node-rs/jieba-linux-arm64-musl@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-arm64-musl/-/jieba-linux-arm64-musl-1.10.3.tgz#6a3149d5abbe09f7c7748da219d5c39522b36c8a" - integrity sha512-pJh+SzrK1HaKakhdFM+ew9vXwpZqMxy9u0U7J4GT+3GvOwnAZ+KjeaHebIfgOz7ZHvp/T4YBNf8oWW4zwj3AJw== - "@node-rs/jieba-linux-x64-gnu@1.10.3": version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-x64-gnu/-/jieba-linux-x64-gnu-1.10.3.tgz#5d75fbc62a36cbb79137284abe4f432da06c2c80" + resolved "https://registry.npmjs.org/@node-rs/jieba-linux-x64-gnu/-/jieba-linux-x64-gnu-1.10.3.tgz" integrity sha512-GF5cfvu/0wXO2fVX/XV3WYH/xEGWzMBvfqLhGiA1OA1xHIufnA1T7uU3ZXkyoNi5Bzf6dmxnwtE4CJL0nvhwjQ== "@node-rs/jieba-linux-x64-musl@1.10.3": version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-x64-musl/-/jieba-linux-x64-musl-1.10.3.tgz#fce3aa9c394dbc51b4b3e92d29b385b4c4f23aec" + 
resolved "https://registry.npmjs.org/@node-rs/jieba-linux-x64-musl/-/jieba-linux-x64-musl-1.10.3.tgz" integrity sha512-h45HMVU/hgzQ0saXNsK9fKlGdah1i1cXZULpB5vQRlRL2ZIaGp+ULtWTogS7vkoo2K8s2l4tqakWMg9eUjIJ2A== -"@node-rs/jieba-wasm32-wasi@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-wasm32-wasi/-/jieba-wasm32-wasi-1.10.3.tgz#b852eb2c9b8c81c5514ed8bb76d74c1cdf66fe76" - integrity sha512-vuoQ62vVoedNGcBmIi4UWdtNBOZG8B+vDYfjx3FD6rNg6g/RgwbVjYXbOVMOQwX06Ob9CfrutICXdUGHgoxzEQ== - dependencies: - "@napi-rs/wasm-runtime" "^0.2.3" - -"@node-rs/jieba-win32-arm64-msvc@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-win32-arm64-msvc/-/jieba-win32-arm64-msvc-1.10.3.tgz#eefce48df8ec0496a0e45593d0b5f8981bb32b80" - integrity sha512-B8t4dh56TZnMLBoYWDkopf1ed37Ru/iU1qiIeBkbZWXGmNBChNZUOd//eaPOFjx8m9Sfc8bkj3FBRWt/kTAhmw== - -"@node-rs/jieba-win32-ia32-msvc@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-win32-ia32-msvc/-/jieba-win32-ia32-msvc-1.10.3.tgz#edfb74e880a32f66a6810502957b62f9b042b487" - integrity sha512-SKuPGZJ5T+X4jOn1S8LklOSZ6HC7UBiw0hwi2z9uqX6WgElquLjGi/xfZ2gPqffeR/5K/PUu7aqYUUPL1XonVQ== - -"@node-rs/jieba-win32-x64-msvc@1.10.3": - version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba-win32-x64-msvc/-/jieba-win32-x64-msvc-1.10.3.tgz#285a24134d9c367b11d73060bdc37c351c3e60b5" - integrity sha512-j9I4+a/tf2hsLu8Sr0NhcLBVNBBQctO2mzcjemMpRa1SlEeODyic9RIyP8Ljz3YTN6MYqKh1KA9iR1xvxjxYFg== - "@node-rs/jieba@^1.6.0": version "1.10.3" - resolved "https://registry.yarnpkg.com/@node-rs/jieba/-/jieba-1.10.3.tgz#05756df55c99f2c4f68c5e41d258edec21a97395" + resolved "https://registry.npmjs.org/@node-rs/jieba/-/jieba-1.10.3.tgz" integrity sha512-SG0CWHmhIveH6upJURgymDKLertEPYbOc5NSFIpbZWW1W2MpqgumVteQO+5YBlkmpR6jMNDPWNQyQwkB6HoeNg== optionalDependencies: "@node-rs/jieba-android-arm-eabi" "1.10.3" @@ -2785,7 +1886,7 @@ "@nodelib/fs.stat" "2.0.5" run-parallel 
"^1.1.9" -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": +"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5": version "2.0.5" resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== @@ -2867,47 +1968,47 @@ "@svgr/babel-plugin-add-jsx-attribute@8.0.0": version "8.0.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz#4001f5d5dd87fa13303e36ee106e3ff3a7eb8b22" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz" integrity sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g== "@svgr/babel-plugin-remove-jsx-attribute@8.0.0": version "8.0.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz#69177f7937233caca3a1afb051906698f2f59186" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz" integrity sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA== "@svgr/babel-plugin-remove-jsx-empty-expression@8.0.0": version "8.0.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz#c2c48104cfd7dcd557f373b70a56e9e3bdae1d44" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz" integrity sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA== "@svgr/babel-plugin-replace-jsx-attribute-value@8.0.0": version "8.0.0" - resolved 
"https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz#8fbb6b2e91fa26ac5d4aa25c6b6e4f20f9c0ae27" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz" integrity sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ== "@svgr/babel-plugin-svg-dynamic-title@8.0.0": version "8.0.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz#1d5ba1d281363fc0f2f29a60d6d936f9bbc657b0" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz" integrity sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og== "@svgr/babel-plugin-svg-em-dimensions@8.0.0": version "8.0.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz#35e08df300ea8b1d41cb8f62309c241b0369e501" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz" integrity sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g== "@svgr/babel-plugin-transform-react-native-svg@8.1.0": version "8.1.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz#90a8b63998b688b284f255c6a5248abd5b28d754" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz" integrity sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q== "@svgr/babel-plugin-transform-svg-component@8.0.0": version "8.0.0" - resolved 
"https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz#013b4bfca88779711f0ed2739f3f7efcefcf4f7e" + resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz" integrity sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw== "@svgr/babel-preset@8.1.0": version "8.1.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-8.1.0.tgz#0e87119aecdf1c424840b9d4565b7137cabf9ece" + resolved "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-8.1.0.tgz" integrity sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug== dependencies: "@svgr/babel-plugin-add-jsx-attribute" "8.0.0" @@ -2919,9 +2020,9 @@ "@svgr/babel-plugin-transform-react-native-svg" "8.1.0" "@svgr/babel-plugin-transform-svg-component" "8.0.0" -"@svgr/core@8.1.0": +"@svgr/core@*", "@svgr/core@8.1.0": version "8.1.0" - resolved "https://registry.yarnpkg.com/@svgr/core/-/core-8.1.0.tgz#41146f9b40b1a10beaf5cc4f361a16a3c1885e88" + resolved "https://registry.npmjs.org/@svgr/core/-/core-8.1.0.tgz" integrity sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA== dependencies: "@babel/core" "^7.21.3" @@ -2932,7 +2033,7 @@ "@svgr/hast-util-to-babel-ast@8.0.0": version "8.0.0" - resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz#6952fd9ce0f470e1aded293b792a2705faf4ffd4" + resolved "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz" integrity sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q== dependencies: "@babel/types" "^7.21.3" @@ -2940,7 +2041,7 @@ "@svgr/plugin-jsx@8.1.0": version "8.1.0" - resolved 
"https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz#96969f04a24b58b174ee4cd974c60475acbd6928" + resolved "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz" integrity sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA== dependencies: "@babel/core" "^7.21.3" @@ -2950,7 +2051,7 @@ "@svgr/plugin-svgo@8.1.0": version "8.1.0" - resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-8.1.0.tgz#b115b7b967b564f89ac58feae89b88c3decd0f00" + resolved "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-8.1.0.tgz" integrity sha512-Ywtl837OGO9pTLIN/onoWLmDQ4zFUycI1g76vuKGEz6evR/ZTJlJuz3G/fIkb6OVBJ2g0o6CGJzaEjfmEo3AHA== dependencies: cosmiconfig "^8.1.3" @@ -2959,7 +2060,7 @@ "@svgr/webpack@^8.1.0": version "8.1.0" - resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-8.1.0.tgz#16f1b5346f102f89fda6ec7338b96a701d8be0c2" + resolved "https://registry.npmjs.org/@svgr/webpack/-/webpack-8.1.0.tgz" integrity sha512-LnhVjMWyMQV9ZmeEy26maJk+8HTIbd59cH4F2MJ439k9DqejRisfFNGAPvRYlKETuh9LrImlS8aKsBgKjMA8WA== dependencies: "@babel/core" "^7.21.3" @@ -2983,13 +2084,6 @@ resolved "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz" integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== -"@tybys/wasm-util@^0.9.0": - version "0.9.0" - resolved "https://registry.yarnpkg.com/@tybys/wasm-util/-/wasm-util-0.9.0.tgz#3e75eb00604c8d6db470bf18c37b7d984a0e3355" - integrity sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw== - dependencies: - tslib "^2.4.0" - "@types/acorn@^4.0.0": version "4.0.6" resolved "https://registry.npmjs.org/@types/acorn/-/acorn-4.0.6.tgz" @@ -3245,10 +2339,10 @@ "@types/history" "^4.7.11" "@types/react" "*" -"@types/react@*": - version "18.3.1" - resolved "https://registry.npmjs.org/@types/react/-/react-18.3.1.tgz" - integrity 
sha512-V0kuGBX3+prX+DQ/7r2qsv1NsdfnCLnTgnRJ1pYnxykBhGMz+qj+box5lq7XsO5mtZsBqpjwwTu/7wszPfMBcw== +"@types/react@*", "@types/react@>= 16.8.0 < 19.0.0", "@types/react@>=16": + version "18.3.6" + resolved "https://registry.npmjs.org/@types/react/-/react-18.3.6.tgz" + integrity sha512-CnGaRYNu2iZlkGXGrOYtdg5mLK8neySj0woZ4e2wF/eli2E6Sazmq5X+Nrj6OBrrFVQfJWTUFeqAzoRhWQXYvg== dependencies: "@types/prop-types" "*" csstype "^3.0.2" @@ -3330,9 +2424,9 @@ resolved "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": +"@webassemblyjs/ast@^1.12.1", "@webassemblyjs/ast@1.12.1": version "1.12.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" + resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz" integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== dependencies: "@webassemblyjs/helper-numbers" "1.11.6" @@ -3398,7 +2492,7 @@ "@webassemblyjs/wasm-edit@^1.12.1": version "1.12.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz" integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== dependencies: "@webassemblyjs/ast" "1.12.1" @@ -3431,7 +2525,7 @@ "@webassemblyjs/wasm-gen" "1.12.1" "@webassemblyjs/wasm-parser" "1.12.1" -"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": +"@webassemblyjs/wasm-parser@^1.12.1", "@webassemblyjs/wasm-parser@1.12.1": version "1.12.1" resolved "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz" integrity 
sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== @@ -3471,7 +2565,7 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: acorn-import-attributes@^1.9.5: version "1.9.5" - resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + resolved "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz" integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.0.0: @@ -3484,7 +2578,7 @@ acorn-walk@^8.0.0: resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz" integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A== -acorn@^8.0.0, acorn@^8.0.4, acorn@^8.7.1, acorn@^8.8.2: +"acorn@^6.0.0 || ^7.0.0 || ^8.0.0", acorn@^8, acorn@^8.0.0, acorn@^8.0.4, acorn@^8.7.1, acorn@^8.8.2: version "8.11.3" resolved "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz" integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== @@ -3509,7 +2603,12 @@ ajv-formats@^2.1.1: dependencies: ajv "^8.0.0" -ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: +ajv-keywords@^3.4.1: + version "3.5.2" + resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^3.5.2: version "3.5.2" resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== @@ -3521,7 +2620,7 @@ ajv-keywords@^5.1.0: dependencies: fast-deep-equal "^3.1.3" -ajv@^6.12.2, ajv@^6.12.5: +ajv@^6.12.2, ajv@^6.12.5, ajv@^6.9.1: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity 
sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -3531,7 +2630,7 @@ ajv@^6.12.2, ajv@^6.12.5: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.0, ajv@^8.9.0: +ajv@^8.0.0, ajv@^8.8.2, ajv@^8.9.0: version "8.12.0" resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== @@ -3542,31 +2641,32 @@ ajv@^8.0.0, ajv@^8.9.0: uri-js "^4.2.2" algoliasearch-helper@^3.13.3: - version "3.16.3" - resolved "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.16.3.tgz" - integrity sha512-1OuJT6sONAa9PxcOmWo5WCAT3jQSpCR9/m5Azujja7nhUQwAUDvaaAYrcmUySsrvHh74usZHbE3jFfGnWtZj8w== + version "3.22.5" + resolved "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.22.5.tgz" + integrity sha512-lWvhdnc+aKOKx8jyA3bsdEgHzm/sglC4cYdMG4xSQyRiPLJVJtH/IVYZG3Hp6PkTEhQqhyVYkeP9z2IlcHJsWw== dependencies: "@algolia/events" "^4.0.1" -algoliasearch@^4.18.0, algoliasearch@^4.19.1: - version "4.22.1" - resolved "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.22.1.tgz" - integrity sha512-jwydKFQJKIx9kIZ8Jm44SdpigFwRGPESaxZBaHSV0XWN2yBJAOT4mT7ppvlrpA4UGzz92pqFnVKr/kaZXrcreg== - dependencies: - "@algolia/cache-browser-local-storage" "4.22.1" - "@algolia/cache-common" "4.22.1" - "@algolia/cache-in-memory" "4.22.1" - "@algolia/client-account" "4.22.1" - "@algolia/client-analytics" "4.22.1" - "@algolia/client-common" "4.22.1" - "@algolia/client-personalization" "4.22.1" - "@algolia/client-search" "4.22.1" - "@algolia/logger-common" "4.22.1" - "@algolia/logger-console" "4.22.1" - "@algolia/requester-browser-xhr" "4.22.1" - "@algolia/requester-common" "4.22.1" - "@algolia/requester-node-http" "4.22.1" - "@algolia/transporter" "4.22.1" +algoliasearch@^4.18.0, algoliasearch@^4.19.1, "algoliasearch@>= 3.1 < 6", "algoliasearch@>= 4.9.1 < 6": + version "4.24.0" + resolved 
"https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.24.0.tgz" + integrity sha512-bf0QV/9jVejssFBmz2HQLxUadxk574t4iwjCKp5E7NBzwKkrDEhKPISIIjAU/p6K5qDx3qoeh4+26zWN1jmw3g== + dependencies: + "@algolia/cache-browser-local-storage" "4.24.0" + "@algolia/cache-common" "4.24.0" + "@algolia/cache-in-memory" "4.24.0" + "@algolia/client-account" "4.24.0" + "@algolia/client-analytics" "4.24.0" + "@algolia/client-common" "4.24.0" + "@algolia/client-personalization" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/logger-common" "4.24.0" + "@algolia/logger-console" "4.24.0" + "@algolia/recommend" "4.24.0" + "@algolia/requester-browser-xhr" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/requester-node-http" "4.24.0" + "@algolia/transporter" "4.24.0" ansi-align@^3.0.1: version "3.0.1" @@ -3654,21 +2754,9 @@ at-least-node@^1.0.0: resolved "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz" integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== -autoprefixer@^10.4.14: - version "10.4.18" - resolved "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.18.tgz" - integrity sha512-1DKbDfsr6KUElM6wg+0zRNkB/Q7WcKYAaK+pzXn+Xqmszm/5Xa9coeNdtP88Vi+dPzZnMjhge8GIV49ZQkDa+g== - dependencies: - browserslist "^4.23.0" - caniuse-lite "^1.0.30001591" - fraction.js "^4.3.7" - normalize-range "^0.1.2" - picocolors "^1.0.0" - postcss-value-parser "^4.2.0" - -autoprefixer@^10.4.19: +autoprefixer@^10.4.14, autoprefixer@^10.4.19: version "10.4.19" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.19.tgz#ad25a856e82ee9d7898c59583c1afeb3fa65f89f" + resolved "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz" integrity sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew== dependencies: browserslist "^4.23.0" @@ -3693,15 +2781,6 @@ babel-plugin-dynamic-import-node@^2.3.3: dependencies: object.assign "^4.1.0" 
-babel-plugin-polyfill-corejs2@^0.4.10: - version "0.4.11" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz#30320dfe3ffe1a336c15afdcdafd6fd615b25e33" - integrity sha512-sMEJ27L0gRHShOh5G54uAAPaiCOygY/5ratXuiyb2G46FmlSpc9eFCzYVyDiPxfNbwzA7mYahmjQc5q+CZQ09Q== - dependencies: - "@babel/compat-data" "^7.22.6" - "@babel/helper-define-polyfill-provider" "^0.6.2" - semver "^6.3.1" - babel-plugin-polyfill-corejs2@^0.4.8: version "0.4.10" resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.10.tgz" @@ -3711,14 +2790,6 @@ babel-plugin-polyfill-corejs2@^0.4.8: "@babel/helper-define-polyfill-provider" "^0.6.1" semver "^6.3.1" -babel-plugin-polyfill-corejs3@^0.10.4: - version "0.10.4" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.4.tgz#789ac82405ad664c20476d0233b485281deb9c77" - integrity sha512-25J6I8NGfa5YkCDogHRID3fVCadIR8/pGl1/spvCkzb6lVn6SR3ojpx9nOn9iEBcUsjY24AmdKm5khcfKdylcg== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.6.1" - core-js-compat "^3.36.1" - babel-plugin-polyfill-corejs3@^0.9.0: version "0.9.0" resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.9.0.tgz" @@ -3734,13 +2805,6 @@ babel-plugin-polyfill-regenerator@^0.5.5: dependencies: "@babel/helper-define-polyfill-provider" "^0.5.0" -babel-plugin-polyfill-regenerator@^0.6.1: - version "0.6.2" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.2.tgz#addc47e240edd1da1058ebda03021f382bba785e" - integrity sha512-2R25rQZWP63nGwaAswvDazbPXfrM3HwVoBXK6HcqeKrSrL/JqcC/rDcf95l4r7LXLyxDXc8uQDa064GubtCABg== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.6.2" - bail@^2.0.0: version "2.0.2" resolved "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz" @@ -3766,10 +2830,10 @@ binary-extensions@^2.0.0: resolved 
"https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz" integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== -body-parser@1.20.2: - version "1.20.2" - resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -3779,7 +2843,7 @@ body-parser@1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -3835,12 +2899,12 @@ brace-expansion@^1.1.7: braces@^3.0.3, braces@~3.0.2: version "3.0.3" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" + resolved "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz" integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== dependencies: fill-range "^7.1.1" -browserslist@^4.0.0, browserslist@^4.18.1, browserslist@^4.21.10, browserslist@^4.22.2, browserslist@^4.22.3, browserslist@^4.23.0: +browserslist@^4.0.0, browserslist@^4.18.1, browserslist@^4.21.10, browserslist@^4.22.2, browserslist@^4.22.3, browserslist@^4.23.0, "browserslist@>= 4.21.0": version "4.23.0" resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz" integrity sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ== @@ -3927,16 +2991,11 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001587, caniuse-lite@^1.0.30001591: +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001587, 
caniuse-lite@^1.0.30001599: version "1.0.30001599" resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001599.tgz" integrity sha512-LRAQHZ4yT1+f9LemSMeqdMpMxZcc4RMWdj4tiFe3G8tNkWK+E58g+/tzotb5cU6TbcVJLr4fySiAW7XmxQvZQA== -caniuse-lite@^1.0.30001599: - version "1.0.30001636" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001636.tgz#b15f52d2bdb95fad32c2f53c0b68032b85188a78" - integrity sha512-bMg2vmr8XBsbL6Lr0UHXy/21m84FTxDLWn2FSqMd5PrlbMxwJlQnC2YWYxVgp66PZE+BBNF2jYQUBKCo1FDeZg== - ccount@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz" @@ -4001,7 +3060,24 @@ cheerio-select@^2.1.0: domhandler "^5.0.3" domutils "^3.0.1" -cheerio@^1.0.0-rc.12, cheerio@^1.0.0-rc.3: +cheerio@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0.tgz" + integrity sha512-quS9HgjQpdaXOvsZz82Oz7uxtXiy6UIsIQcpBj7HRw2M63Skasm9qlDocAM7jNuaxdhpPU7c4kJN+gA5MCu4ww== + dependencies: + cheerio-select "^2.1.0" + dom-serializer "^2.0.0" + domhandler "^5.0.3" + domutils "^3.1.0" + encoding-sniffer "^0.2.0" + htmlparser2 "^9.1.0" + parse5 "^7.1.2" + parse5-htmlparser2-tree-adapter "^7.0.0" + parse5-parser-stream "^7.1.2" + undici "^6.19.5" + whatwg-mimetype "^4.0.0" + +cheerio@1.0.0-rc.12: version "1.0.0-rc.12" resolved "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz" integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== @@ -4076,17 +3152,12 @@ clone-deep@^4.0.1: clsx@^1.1.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" + resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz" integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== -clsx@^2.0.0: - version "2.1.0" - resolved "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz" - integrity 
sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg== - -clsx@^2.1.1: +clsx@^2.0.0, clsx@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999" + resolved "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz" integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA== collapse-white-space@^2.0.0: @@ -4108,19 +3179,19 @@ color-convert@^2.0.1: dependencies: color-name "~1.1.4" -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - color-name@~1.1.4: version "1.1.4" resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + colord@^2.9.3: version "2.9.3" - resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" + resolved "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz" integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== colorette@^2.0.10: @@ -4138,11 +3209,6 @@ comma-separated-tokens@^2.0.0: resolved "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz" integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== -commander@7, commander@^7.2.0: - version "7.2.0" - resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz" - integrity 
sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== - commander@^10.0.0: version "10.0.1" resolved "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz" @@ -4158,11 +3224,21 @@ commander@^5.1.0: resolved "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz" integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== +commander@^7.2.0: + version "7.2.0" + resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + commander@^8.3.0: version "8.3.0" resolved "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz" integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== +commander@7: + version "7.2.0" + resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + common-path-prefix@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz" @@ -4278,13 +3354,6 @@ core-js-compat@^3.31.0, core-js-compat@^3.34.0: dependencies: browserslist "^4.22.3" -core-js-compat@^3.36.1: - version "3.37.1" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.37.1.tgz#c844310c7852f4bdf49b8d339730b97e17ff09ee" - integrity sha512-9TNiImhKvQqSUkOvk/mMRZzOANTiEVC7WaBNhHcKM7x+/5E1l5NvsysR19zuDQScE8k+kfQXWRN3AtS/eOSHpg== - dependencies: - browserslist "^4.23.0" - core-js-pure@^3.30.2: version "3.36.0" resolved "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.36.0.tgz" @@ -4346,7 +3415,7 @@ crypto-random-string@^4.0.0: css-declaration-sorter@^7.2.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz#6dec1c9523bc4a643e088aab8f09e67a54961024" + resolved 
"https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz" integrity sha512-h70rUM+3PNFuaBDTLe8wF/cdWu+dOZmb7pJt8Z2sedYbAcQVQV/tEchueg3GWxwqS0cxtbxmaHEdkNACqcvsow== css-loader@^6.8.1: @@ -4365,7 +3434,7 @@ css-loader@^6.8.1: css-minimizer-webpack-plugin@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-5.0.1.tgz#33effe662edb1a0bf08ad633c32fa75d0f7ec565" + resolved "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-5.0.1.tgz" integrity sha512-3caImjKFQkS+ws1TGcFn0V1HyDJFq1Euy589JlD6/3rV2kj+w7r5G9WDMgSHvpvXHNZ2calVypZWuEDQd9wfLg== dependencies: "@jridgewell/trace-mapping" "^0.3.18" @@ -4399,7 +3468,7 @@ css-select@^5.1.0: css-tree@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.3.1.tgz#10264ce1e5442e8572fc82fbe490644ff54b5c20" + resolved "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz" integrity sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw== dependencies: mdn-data "2.0.30" @@ -4407,7 +3476,7 @@ css-tree@^2.3.1: css-tree@~2.2.0: version "2.2.1" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.2.1.tgz#36115d382d60afd271e377f9c5f67d02bd48c032" + resolved "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz" integrity sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA== dependencies: mdn-data "2.0.28" @@ -4425,7 +3494,7 @@ cssesc@^3.0.0: cssnano-preset-advanced@^6.1.2: version "6.1.2" - resolved "https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-6.1.2.tgz#82b090872b8f98c471f681d541c735acf8b94d3f" + resolved "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-6.1.2.tgz" integrity sha512-Nhao7eD8ph2DoHolEzQs5CfRpiEP0xa1HBdnFZ82kvqdmbwVBUr2r1QuQ4t1pi+D1ZpqpcO4T+wy/7RxzJ/WPQ== dependencies: autoprefixer "^10.4.19" @@ -4438,7 
+3507,7 @@ cssnano-preset-advanced@^6.1.2: cssnano-preset-default@^6.1.2: version "6.1.2" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-6.1.2.tgz#adf4b89b975aa775f2750c89dbaf199bbd9da35e" + resolved "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-6.1.2.tgz" integrity sha512-1C0C+eNaeN8OcHQa193aRgYexyJtU8XwbdieEjClw+J9d94E41LwT6ivKH0WT+fYwYWB0Zp3I3IZ7tI/BbUbrg== dependencies: browserslist "^4.23.0" @@ -4474,12 +3543,12 @@ cssnano-preset-default@^6.1.2: cssnano-utils@^4.0.2: version "4.0.2" - resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-4.0.2.tgz#56f61c126cd0f11f2eef1596239d730d9fceff3c" + resolved "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.2.tgz" integrity sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ== cssnano@^6.0.1, cssnano@^6.1.2: version "6.1.2" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-6.1.2.tgz#4bd19e505bd37ee7cf0dc902d3d869f6d79c66b8" + resolved "https://registry.npmjs.org/cssnano/-/cssnano-6.1.2.tgz" integrity sha512-rYk5UeX7VAM/u0lNqewCdasdtPK81CgX8wJFLEIXHbV2oldWRgJAsZrdhRXkV1NJzA2g850KiFm9mMU2HxNxMA== dependencies: cssnano-preset-default "^6.1.2" @@ -4487,7 +3556,7 @@ cssnano@^6.0.1, cssnano@^6.1.2: csso@^5.0.5: version "5.0.5" - resolved "https://registry.yarnpkg.com/csso/-/csso-5.0.5.tgz#f9b7fe6cc6ac0b7d90781bb16d5e9874303e2ca6" + resolved "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz" integrity sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ== dependencies: css-tree "~2.2.0" @@ -4504,7 +3573,7 @@ cytoscape-cose-bilkent@^4.1.0: dependencies: cose-base "^1.0.0" -cytoscape@^3.28.1: +cytoscape@^3.2.0, cytoscape@^3.28.1: version "3.28.1" resolved "https://registry.npmjs.org/cytoscape/-/cytoscape-3.28.1.tgz" integrity sha512-xyItz4O/4zp9/239wCcH8ZcFuuZooEeF8KHRmzjDfGdXsj3OG9MFSMA0pJE0uX3uCN/ygof6hHf4L7lst+JaDg== @@ -4512,6 +3581,13 
@@ cytoscape@^3.28.1: heap "^0.2.6" lodash "^4.17.21" +d3-array@^3.2.0, "d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3: + version "3.2.4" + resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz" + integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== + dependencies: + internmap "1 - 2" + "d3-array@1 - 2": version "2.12.1" resolved "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz" @@ -4519,13 +3595,6 @@ cytoscape@^3.28.1: dependencies: internmap "^1.0.0" -"d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3, d3-array@^3.2.0: - version "3.2.4" - resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz" - integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== - dependencies: - internmap "1 - 2" - d3-axis@3: version "3.0.0" resolved "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz" @@ -4635,16 +3704,16 @@ d3-hierarchy@3: dependencies: d3-color "1 - 3" +d3-path@^3.1.0, "d3-path@1 - 3", d3-path@3: + version "3.1.0" + resolved "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz" + integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ== + d3-path@1: version "1.0.9" resolved "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz" integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg== -"d3-path@1 - 3", d3-path@3, d3-path@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz" - integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ== - d3-polygon@3: version "3.0.1" resolved "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz" @@ -4692,13 +3761,6 @@ d3-scale@4: resolved "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz" integrity 
sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ== -d3-shape@3: - version "3.2.0" - resolved "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz" - integrity sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA== - dependencies: - d3-path "^3.1.0" - d3-shape@^1.2.0: version "1.3.7" resolved "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz" @@ -4706,6 +3768,13 @@ d3-shape@^1.2.0: dependencies: d3-path "1" +d3-shape@3: + version "3.2.0" + resolved "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz" + integrity sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA== + dependencies: + d3-path "^3.1.0" + "d3-time-format@2 - 4", d3-time-format@4: version "4.1.0" resolved "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz" @@ -4801,26 +3870,26 @@ debounce@^1.2.1: resolved "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz" integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug== -debug@2.6.9, debug@^2.6.0: +debug@^2.6.0: version "2.6.9" resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: +debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1, debug@4: version "4.3.4" resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" -debug@^4.2.0: - version "4.3.5" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.5.tgz#e83444eceb9fedd4a1da56d671ae2446a01a6e1e" - integrity sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg== +debug@2.6.9: + version "2.6.9" + 
resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: - ms "2.1.2" + ms "2.0.0" decode-named-character-reference@^1.0.0: version "1.0.2" @@ -4902,16 +3971,16 @@ delaunator@5: dependencies: robust-predicates "^3.0.2" -depd@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - depd@~1.1.2: version "1.1.2" resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== +depd@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + dequal@^2.0.0: version "2.0.3" resolved "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz" @@ -5026,9 +4095,9 @@ domhandler@^5.0.2, domhandler@^5.0.3: domelementtype "^2.3.0" dompurify@^3.0.5: - version "3.0.9" - resolved "https://registry.npmjs.org/dompurify/-/dompurify-3.0.9.tgz" - integrity sha512-uyb4NDIvQ3hRn6NiC+SIFaP4mJ/MdXlvtunaqK9Bn6dD3RuB/1S/gasEjDHD8eiaqdSael2vBv+hOs7Y+jhYOQ== + version "3.1.6" + resolved "https://registry.npmjs.org/dompurify/-/dompurify-3.1.6.tgz" + integrity sha512-cTOAhc36AalkjtBpfG6O8JimdTMWNXjiePT2xQH/ppBGi/4uIpmj8eKyIkMJErXWARyINV/sB38yf8JCLF5pbQ== domutils@^2.5.2, domutils@^2.8.0: version "2.8.0" @@ -5039,7 +4108,7 @@ domutils@^2.5.2, domutils@^2.8.0: domelementtype "^2.2.0" domhandler "^4.2.0" -domutils@^3.0.1: +domutils@^3.0.1, domutils@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz" integrity sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA== @@ -5118,9 +4187,22 @@ encodeurl@~1.0.2: resolved 
"https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + +encoding-sniffer@^0.2.0: + version "0.2.0" + resolved "https://registry.npmjs.org/encoding-sniffer/-/encoding-sniffer-0.2.0.tgz" + integrity sha512-ju7Wq1kg04I3HtiYIOrUrdfdDvkyO9s5XM8QAj/bN61Yo/Vb4vgJxy5vi4Yxk01gWHbrofpPtpxM8bKger9jhg== + dependencies: + iconv-lite "^0.6.3" + whatwg-encoding "^3.1.1" + enhanced-resolve@^5.17.1: version "5.17.1" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + resolved "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz" integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" @@ -5131,7 +4213,7 @@ entities@^2.0.0: resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== -entities@^4.2.0, entities@^4.4.0: +entities@^4.2.0, entities@^4.4.0, entities@^4.5.0: version "4.5.0" resolved "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz" integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== @@ -5323,36 +4405,36 @@ execa@^5.0.0: strip-final-newline "^2.0.0" express@^4.17.3: - version "4.19.2" - resolved "https://registry.npmjs.org/express/-/express-4.19.2.tgz" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== + version "4.21.0" + resolved "https://registry.npmjs.org/express/-/express-4.21.0.tgz" + integrity 
sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "1.2.0" + finalhandler "1.3.1" fresh "0.5.2" http-errors "2.0.0" - merge-descriptors "1.0.1" + merge-descriptors "1.0.3" methods "~1.1.2" on-finished "2.4.1" parseurl "~1.3.3" - path-to-regexp "0.1.7" + path-to-regexp "0.1.10" proxy-addr "~2.0.7" - qs "6.11.0" + qs "6.13.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" + send "0.19.0" + serve-static "1.16.2" setprototypeof "1.2.0" statuses "2.0.1" type-is "~1.6.18" @@ -5427,7 +4509,7 @@ feed@^4.2.2: dependencies: xml-js "^1.6.11" -file-loader@^6.2.0: +file-loader@*, file-loader@^6.2.0: version "6.2.0" resolved "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz" integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== @@ -5442,18 +4524,18 @@ filesize@^8.0.6: fill-range@^7.1.1: version "7.1.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" + resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz" integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== dependencies: to-regex-range "^5.0.1" -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz" + integrity 
sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== dependencies: debug "2.6.9" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" on-finished "2.4.1" parseurl "~1.3.3" @@ -5547,7 +4629,7 @@ fresh@0.5.2: fs-extra@^10.0.0: version "10.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== dependencies: graceful-fs "^4.2.0" @@ -5581,12 +4663,7 @@ fs-monkey@^1.0.4: fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" - integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== function-bind@^1.1.2: version "1.1.2" @@ -5730,16 +4807,16 @@ got@^12.1.0: p-cancelable "^3.0.0" responselike "^3.0.0" -graceful-fs@4.2.10: - version "4.2.10" - resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== - graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== +graceful-fs@4.2.10: + version "4.2.10" + 
resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + gray-matter@^4.0.3: version "4.0.3" resolved "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz" @@ -6031,6 +5108,16 @@ htmlparser2@^8.0.1: domutils "^3.0.1" entities "^4.4.0" +htmlparser2@^9.1.0: + version "9.1.0" + resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-9.1.0.tgz" + integrity sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.1.0" + entities "^4.5.0" + http-cache-semantics@^4.1.1: version "4.1.1" resolved "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz" @@ -6041,6 +5128,16 @@ http-deceiver@^1.2.7: resolved "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + http-errors@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" @@ -6052,16 +5149,6 @@ http-errors@2.0.0: statuses "2.0.1" toidentifier "1.0.1" -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" - integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - http-parser-js@>=0.5.1: version "0.5.8" resolved 
"https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" @@ -6100,6 +5187,13 @@ human-signals@^2.1.0: resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== +iconv-lite@^0.6.3, iconv-lite@0.6, iconv-lite@0.6.3: + version "0.6.3" + resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + iconv-lite@0.4.24: version "0.4.24" resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" @@ -6107,13 +5201,6 @@ iconv-lite@0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" -iconv-lite@0.6: - version "0.6.3" - resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz" - integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== - dependencies: - safer-buffer ">= 2.1.2 < 3.0.0" - icss-utils@^5.0.0, icss-utils@^5.1.0: version "5.1.0" resolved "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz" @@ -6133,7 +5220,7 @@ image-size@^1.0.2: immediate@^3.2.3: version "3.3.0" - resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.3.0.tgz#1aef225517836bcdf7f2a2de2600c79ff0269266" + resolved "https://registry.npmjs.org/immediate/-/immediate-3.3.0.tgz" integrity sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q== immer@^9.0.7: @@ -6164,10 +5251,10 @@ indent-string@^4.0.0: resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -infima@0.2.0-alpha.43: - version "0.2.0-alpha.43" - resolved "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.43.tgz" - integrity 
sha512-2uw57LvUqW0rK/SWYnd/2rRfxNA5DDNOh33jxF7fy46VWoNhGxiUQyVZHbBMjQ33mQem0cjdDVwgWVAmlRfgyQ== +infima@0.2.0-alpha.44: + version "0.2.0-alpha.44" + resolved "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.44.tgz" + integrity sha512-tuRkUSO/lB3rEhLJk25atwAjgLuzq070+pOW8XcvpHky/YbENnRRdPd85IBkyeTgttmOy5ah+yHYsK1HhUd4lQ== inflight@^1.0.4: version "1.0.6" @@ -6177,7 +5264,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: +inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3, inherits@2, inherits@2.0.4: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -6187,16 +5274,16 @@ inherits@2.0.3: resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== -ini@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz" - integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== - ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.8" resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== +ini@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz" + integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== + inline-style-parser@0.1.1: version "0.1.1" resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz" @@ -6207,16 +5294,16 @@ inline-style-parser@0.2.2: resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.2.tgz" integrity sha512-EcKzdTHVe8wFVOGEYXiW9WmJXPjqi1T+234YpJr98RiFYKHV3cdy1+3mkTE+KHTHxFFLH51SfaGOoUdW+v7ViQ== 
-"internmap@1 - 2": - version "2.0.3" - resolved "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz" - integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== - internmap@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz" integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw== +"internmap@1 - 2": + version "2.0.3" + resolved "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz" + integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== + interpret@^1.0.0: version "1.4.0" resolved "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz" @@ -6229,16 +5316,16 @@ invariant@^2.2.4: dependencies: loose-envify "^1.0.0" -ipaddr.js@1.9.1: - version "1.9.1" - resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - ipaddr.js@^2.0.1: version "2.1.0" resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.1.0.tgz" integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + is-alphabetical@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz" @@ -6414,16 +5501,16 @@ is-yarn-global@^0.4.0: resolved "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.4.1.tgz" integrity sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ== -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" - integrity 
sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== - isarray@~1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== + isexe@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" @@ -6457,7 +5544,7 @@ jest-worker@^27.4.5: jest-worker@^29.4.3: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz" integrity sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== dependencies: "@types/node" "*" @@ -6571,7 +5658,7 @@ kind-of@^6.0.0, kind-of@^6.0.2: klaw-sync@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/klaw-sync/-/klaw-sync-6.0.0.tgz#1fd2cfd56ebb6250181114f0a581167099c2b28c" + resolved "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz" integrity sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ== dependencies: graceful-fs "^4.1.11" @@ -6613,7 +5700,7 @@ leven@^3.1.0: lilconfig@^3.1.1: version "3.1.2" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.2.tgz#e4a7c3cb549e3a606c8dcc32e5ae1005e62c05cb" + resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.2.tgz" integrity sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow== lines-and-columns@^1.1.6: @@ -6727,17 +5814,17 @@ lru-cache@^6.0.0: lunr-languages@^1.4.0: version "1.14.0" - resolved 
"https://registry.yarnpkg.com/lunr-languages/-/lunr-languages-1.14.0.tgz#6e97635f434631729dd0e5654daedd291cd6f2d0" + resolved "https://registry.npmjs.org/lunr-languages/-/lunr-languages-1.14.0.tgz" integrity sha512-hWUAb2KqM3L7J5bcrngszzISY4BxrXn/Xhbb9TTCJYEGqlR1nG67/M14sp09+PTIRklobrn57IAxcdcO/ZFyNA== lunr@^2.3.9: version "2.3.9" - resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" + resolved "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz" integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== mark.js@^8.11.1: version "8.11.1" - resolved "https://registry.yarnpkg.com/mark.js/-/mark.js-8.11.1.tgz#180f1f9ebef8b0e638e4166ad52db879beb2ffc5" + resolved "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz" integrity sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ== markdown-extensions@^2.0.0: @@ -6994,12 +6081,12 @@ mdast-util-to-string@^4.0.0: mdn-data@2.0.28: version "2.0.28" - resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.28.tgz#5ec48e7bef120654539069e1ae4ddc81ca490eba" + resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz" integrity sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g== mdn-data@2.0.30: version "2.0.30" - resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.30.tgz#ce4df6f80af6cfbe218ecd5c552ba13c4dfa08cc" + resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz" integrity sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA== media-typer@0.3.0: @@ -7014,10 +6101,10 @@ memfs@^3.1.2, memfs@^3.4.3: dependencies: fs-monkey "^1.0.4" -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" - integrity 
sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== merge-stream@^2.0.0: version "2.0.0" @@ -7646,13 +6733,13 @@ micromark@^4.0.0: micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: version "4.0.8" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz" integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== dependencies: braces "^3.0.3" picomatch "^2.3.1" -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": +"mime-db@>= 1.43.0 < 2": version "1.52.0" resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== @@ -7662,14 +6749,40 @@ mime-db@~1.33.0: resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz" integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ== -mime-types@2.1.18, mime-types@~2.1.17: +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.27: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime-types@^2.1.31: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity 
sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime-types@~2.1.17, mime-types@2.1.18: version "2.1.18" resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz" integrity sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ== dependencies: mime-db "~1.33.0" -mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.24, mime-types@~2.1.34: +mime-types@~2.1.24: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime-types@~2.1.34: version "2.1.35" resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== @@ -7709,7 +6822,7 @@ minimalistic-assert@^1.0.0: resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1: +minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@3.1.2: version "3.1.2" resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== @@ -8035,7 +7148,14 @@ parse5-htmlparser2-tree-adapter@^7.0.0: domhandler "^5.0.2" parse5 "^7.0.0" -parse5@^7.0.0: +parse5-parser-stream@^7.1.2: + version "7.1.2" + resolved "https://registry.npmjs.org/parse5-parser-stream/-/parse5-parser-stream-7.1.2.tgz" + integrity sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow== + dependencies: + parse5 "^7.0.0" + +parse5@^7.0.0, parse5@^7.1.2: version "7.1.2" resolved 
"https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz" integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw== @@ -8090,23 +7210,23 @@ path-parse@^1.0.7: resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== +path-to-regexp@^1.7.0: + version "1.9.0" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz" + integrity sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g== + dependencies: + isarray "0.0.1" + +path-to-regexp@0.1.10: + version "0.1.10" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz" + integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== path-to-regexp@2.2.1: version "2.2.1" resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz" integrity sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ== -path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== - dependencies: - isarray "0.0.1" - path-type@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" @@ -8147,7 +7267,7 @@ pkg-up@^3.1.0: postcss-calc@^9.0.1: version "9.0.1" - resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-9.0.1.tgz#a744fd592438a93d6de0f1434c572670361eb6c6" + resolved "https://registry.npmjs.org/postcss-calc/-/postcss-calc-9.0.1.tgz" integrity 
sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ== dependencies: postcss-selector-parser "^6.0.11" @@ -8155,7 +7275,7 @@ postcss-calc@^9.0.1: postcss-colormin@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-6.1.0.tgz#076e8d3fb291fbff7b10e6b063be9da42ff6488d" + resolved "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-6.1.0.tgz" integrity sha512-x9yX7DOxeMAR+BgGVnNSAxmAj98NX/YxEMNFP+SDCEeNLb2r3i6Hh1ksMsnW8Ub5SLCpbescQqn9YEbE9554Sw== dependencies: browserslist "^4.23.0" @@ -8165,7 +7285,7 @@ postcss-colormin@^6.1.0: postcss-convert-values@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-6.1.0.tgz#3498387f8efedb817cbc63901d45bd1ceaa40f48" + resolved "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-6.1.0.tgz" integrity sha512-zx8IwP/ts9WvUM6NkVSkiU902QZL1bwPhaVaLynPtCsOTqp+ZKbNi+s6XJg3rfqpKGA/oc7Oxk5t8pOQJcwl/w== dependencies: browserslist "^4.23.0" @@ -8173,27 +7293,27 @@ postcss-convert-values@^6.1.0: postcss-discard-comments@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-6.0.2.tgz#e768dcfdc33e0216380623652b0a4f69f4678b6c" + resolved "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-6.0.2.tgz" integrity sha512-65w/uIqhSBBfQmYnG92FO1mWZjJ4GL5b8atm5Yw2UgrwD7HiNiSSNwJor1eCFGzUgYnN/iIknhNRVqjrrpuglw== postcss-discard-duplicates@^6.0.3: version "6.0.3" - resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.3.tgz#d121e893c38dc58a67277f75bb58ba43fce4c3eb" + resolved "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.3.tgz" integrity sha512-+JA0DCvc5XvFAxwx6f/e68gQu/7Z9ud584VLmcgto28eB8FqSFZwtrLwB5Kcp70eIoWP/HXqz4wpo8rD8gpsTw== postcss-discard-empty@^6.0.3: version "6.0.3" - resolved 
"https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-6.0.3.tgz#ee39c327219bb70473a066f772621f81435a79d9" + resolved "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-6.0.3.tgz" integrity sha512-znyno9cHKQsK6PtxL5D19Fj9uwSzC2mB74cpT66fhgOadEUPyXFkbgwm5tvc3bt3NAy8ltE5MrghxovZRVnOjQ== postcss-discard-overridden@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-6.0.2.tgz#4e9f9c62ecd2df46e8fdb44dc17e189776572e2d" + resolved "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-6.0.2.tgz" integrity sha512-j87xzI4LUggC5zND7KdjsI25APtyMuynXZSujByMaav2roV6OZX+8AaCUcZSWqckZpjAjRyFDdpqybgjFO0HJQ== postcss-discard-unused@^6.0.5: version "6.0.5" - resolved "https://registry.yarnpkg.com/postcss-discard-unused/-/postcss-discard-unused-6.0.5.tgz#c1b0e8c032c6054c3fbd22aaddba5b248136f338" + resolved "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-6.0.5.tgz" integrity sha512-wHalBlRHkaNnNwfC8z+ppX57VhvS+HWgjW508esjdaEYr3Mx7Gnn2xA4R/CKf5+Z9S5qsqC+Uzh4ueENWwCVUA== dependencies: postcss-selector-parser "^6.0.16" @@ -8209,7 +7329,7 @@ postcss-loader@^7.3.3: postcss-merge-idents@^6.0.3: version "6.0.3" - resolved "https://registry.yarnpkg.com/postcss-merge-idents/-/postcss-merge-idents-6.0.3.tgz#7b9c31c7bc823c94bec50f297f04e3c2b838ea65" + resolved "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-6.0.3.tgz" integrity sha512-1oIoAsODUs6IHQZkLQGO15uGEbK3EAl5wi9SS8hs45VgsxQfMnxvt+L+zIr7ifZFIH14cfAeVe2uCTa+SPRa3g== dependencies: cssnano-utils "^4.0.2" @@ -8217,7 +7337,7 @@ postcss-merge-idents@^6.0.3: postcss-merge-longhand@^6.0.5: version "6.0.5" - resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-6.0.5.tgz#ba8a8d473617c34a36abbea8dda2b215750a065a" + resolved "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-6.0.5.tgz" integrity 
sha512-5LOiordeTfi64QhICp07nzzuTDjNSO8g5Ksdibt44d+uvIIAE1oZdRn8y/W5ZtYgRH/lnLDlvi9F8btZcVzu3w== dependencies: postcss-value-parser "^4.2.0" @@ -8225,7 +7345,7 @@ postcss-merge-longhand@^6.0.5: postcss-merge-rules@^6.1.1: version "6.1.1" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-6.1.1.tgz#7aa539dceddab56019469c0edd7d22b64c3dea9d" + resolved "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-6.1.1.tgz" integrity sha512-KOdWF0gju31AQPZiD+2Ar9Qjowz1LTChSjFFbS+e2sFgc4uHOp3ZvVX4sNeTlk0w2O31ecFGgrFzhO0RSWbWwQ== dependencies: browserslist "^4.23.0" @@ -8235,14 +7355,14 @@ postcss-merge-rules@^6.1.1: postcss-minify-font-values@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-6.1.0.tgz#a0e574c02ee3f299be2846369211f3b957ea4c59" + resolved "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-6.1.0.tgz" integrity sha512-gklfI/n+9rTh8nYaSJXlCo3nOKqMNkxuGpTn/Qm0gstL3ywTr9/WRKznE+oy6fvfolH6dF+QM4nCo8yPLdvGJg== dependencies: postcss-value-parser "^4.2.0" postcss-minify-gradients@^6.0.3: version "6.0.3" - resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-6.0.3.tgz#ca3eb55a7bdb48a1e187a55c6377be918743dbd6" + resolved "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-6.0.3.tgz" integrity sha512-4KXAHrYlzF0Rr7uc4VrfwDJ2ajrtNEpNEuLxFgwkhFZ56/7gaE4Nr49nLsQDZyUe+ds+kEhf+YAUolJiYXF8+Q== dependencies: colord "^2.9.3" @@ -8251,7 +7371,7 @@ postcss-minify-gradients@^6.0.3: postcss-minify-params@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-6.1.0.tgz#54551dec77b9a45a29c3cb5953bf7325a399ba08" + resolved "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-6.1.0.tgz" integrity sha512-bmSKnDtyyE8ujHQK0RQJDIKhQ20Jq1LYiez54WiaOoBtcSuflfK3Nm596LvbtlFcpipMjgClQGyGr7GAs+H1uA== 
dependencies: browserslist "^4.23.0" @@ -8260,7 +7380,7 @@ postcss-minify-params@^6.1.0: postcss-minify-selectors@^6.0.4: version "6.0.4" - resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-6.0.4.tgz#197f7d72e6dd19eed47916d575d69dc38b396aff" + resolved "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-6.0.4.tgz" integrity sha512-L8dZSwNLgK7pjTto9PzWRoMbnLq5vsZSTu8+j1P/2GB8qdtGQfn+K1uSvFgYvgh83cbyxT5m43ZZhUMTJDSClQ== dependencies: postcss-selector-parser "^6.0.16" @@ -8295,47 +7415,47 @@ postcss-modules-values@^4.0.0: postcss-normalize-charset@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-6.0.2.tgz#1ec25c435057a8001dac942942a95ffe66f721e1" + resolved "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-6.0.2.tgz" integrity sha512-a8N9czmdnrjPHa3DeFlwqst5eaL5W8jYu3EBbTTkI5FHkfMhFZh1EGbku6jhHhIzTA6tquI2P42NtZ59M/H/kQ== postcss-normalize-display-values@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.2.tgz#54f02764fed0b288d5363cbb140d6950dbbdd535" + resolved "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.2.tgz" integrity sha512-8H04Mxsb82ON/aAkPeq8kcBbAtI5Q2a64X/mnRRfPXBq7XeogoQvReqxEfc0B4WPq1KimjezNC8flUtC3Qz6jg== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-positions@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-6.0.2.tgz#e982d284ec878b9b819796266f640852dbbb723a" + resolved "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-6.0.2.tgz" integrity sha512-/JFzI441OAB9O7VnLA+RtSNZvQ0NCFZDOtp6QPFo1iIyawyXg0YI3CYM9HBy1WvwCRHnPep/BvI1+dGPKoXx/Q== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-repeat-style@^6.0.2: version 
"6.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.2.tgz#f8006942fd0617c73f049dd8b6201c3a3040ecf3" + resolved "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.2.tgz" integrity sha512-YdCgsfHkJ2jEXwR4RR3Tm/iOxSfdRt7jplS6XRh9Js9PyCR/aka/FCb6TuHT2U8gQubbm/mPmF6L7FY9d79VwQ== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-string@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-6.0.2.tgz#e3cc6ad5c95581acd1fc8774b309dd7c06e5e363" + resolved "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-6.0.2.tgz" integrity sha512-vQZIivlxlfqqMp4L9PZsFE4YUkWniziKjQWUtsxUiVsSSPelQydwS8Wwcuw0+83ZjPWNTl02oxlIvXsmmG+CiQ== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-timing-functions@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.2.tgz#40cb8726cef999de984527cbd9d1db1f3e9062c0" + resolved "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.2.tgz" integrity sha512-a+YrtMox4TBtId/AEwbA03VcJgtyW4dGBizPl7e88cTFULYsprgHWTbfyjSLyHeBcK/Q9JhXkt2ZXiwaVHoMzA== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-unicode@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-6.1.0.tgz#aaf8bbd34c306e230777e80f7f12a4b7d27ce06e" + resolved "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-6.1.0.tgz" integrity sha512-QVC5TQHsVj33otj8/JD869Ndr5Xcc/+fwRh4HAsFsAeygQQXm+0PySrKbr/8tkDKzW+EVT3QkqZMfFrGiossDg== dependencies: browserslist "^4.23.0" @@ -8343,21 +7463,21 @@ postcss-normalize-unicode@^6.1.0: postcss-normalize-url@^6.0.2: version "6.0.2" - resolved 
"https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-6.0.2.tgz#292792386be51a8de9a454cb7b5c58ae22db0f79" + resolved "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-6.0.2.tgz" integrity sha512-kVNcWhCeKAzZ8B4pv/DnrU1wNh458zBNp8dh4y5hhxih5RZQ12QWMuQrDgPRw3LRl8mN9vOVfHl7uhvHYMoXsQ== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-whitespace@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.2.tgz#fbb009e6ebd312f8b2efb225c2fcc7cf32b400cd" + resolved "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.2.tgz" integrity sha512-sXZ2Nj1icbJOKmdjXVT9pnyHQKiSAyuNQHSgRCUgThn2388Y9cGVDR+E9J9iAYbSbLHI+UUwLVl1Wzco/zgv0Q== dependencies: postcss-value-parser "^4.2.0" postcss-ordered-values@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-6.0.2.tgz#366bb663919707093451ab70c3f99c05672aaae5" + resolved "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-6.0.2.tgz" integrity sha512-VRZSOB+JU32RsEAQrO94QPkClGPKJEL/Z9PCBImXMhIeK5KAYo6slP/hBYlLgrCjFxyqvn5VC81tycFEDBLG1Q== dependencies: cssnano-utils "^4.0.2" @@ -8365,14 +7485,14 @@ postcss-ordered-values@^6.0.2: postcss-reduce-idents@^6.0.3: version "6.0.3" - resolved "https://registry.yarnpkg.com/postcss-reduce-idents/-/postcss-reduce-idents-6.0.3.tgz#b0d9c84316d2a547714ebab523ec7d13704cd486" + resolved "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-6.0.3.tgz" integrity sha512-G3yCqZDpsNPoQgbDUy3T0E6hqOQ5xigUtBQyrmq3tn2GxlyiL0yyl7H+T8ulQR6kOcHJ9t7/9H4/R2tv8tJbMA== dependencies: postcss-value-parser "^4.2.0" postcss-reduce-initial@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-6.1.0.tgz#4401297d8e35cb6e92c8e9586963e267105586ba" + resolved 
"https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-6.1.0.tgz" integrity sha512-RarLgBK/CrL1qZags04oKbVbrrVK2wcxhvta3GCxrZO4zveibqbRPmm2VI8sSgCXwoUHEliRSbOfpR0b/VIoiw== dependencies: browserslist "^4.23.0" @@ -8380,37 +7500,29 @@ postcss-reduce-initial@^6.1.0: postcss-reduce-transforms@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.2.tgz#6fa2c586bdc091a7373caeee4be75a0f3e12965d" + resolved "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.2.tgz" integrity sha512-sB+Ya++3Xj1WaT9+5LOOdirAxP7dJZms3GRcYheSPi1PiTMigsxHAdkrbItHxwYHr4kt1zL7mmcHstgMYT+aiA== dependencies: postcss-value-parser "^4.2.0" -postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.16: +postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.16, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4: version "6.1.0" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.0.tgz#49694cb4e7c649299fea510a29fa6577104bcf53" + resolved "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.0.tgz" integrity sha512-UMz42UD0UY0EApS0ZL9o1XnLhSTtvvvLe5Dc2H2O56fvRZi+KulDyf5ctDhhtYJBGKStV2FL1fy6253cmLgqVQ== dependencies: cssesc "^3.0.0" util-deprecate "^1.0.2" -postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4: - version "6.0.16" - resolved "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.16.tgz" - integrity sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw== - dependencies: - cssesc "^3.0.0" - util-deprecate "^1.0.2" - postcss-sort-media-queries@^5.2.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-5.2.0.tgz#4556b3f982ef27d3bac526b99b6c0d3359a6cf97" + resolved 
"https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-5.2.0.tgz" integrity sha512-AZ5fDMLD8SldlAYlvi8NIqo0+Z8xnXU2ia0jxmuhxAU+Lqt9K+AlmLNJ/zWEnE9x+Zx3qL3+1K20ATgNOr3fAA== dependencies: sort-css-media-queries "2.2.0" postcss-svgo@^6.0.3: version "6.0.3" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-6.0.3.tgz#1d6e180d6df1fa8a3b30b729aaa9161e94f04eaa" + resolved "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-6.0.3.tgz" integrity sha512-dlrahRmxP22bX6iKEjOM+c8/1p+81asjKT+V5lrgOH944ryx/OHpclnIbGsKVd3uWOXFLYJwCVf0eEkJGvO96g== dependencies: postcss-value-parser "^4.2.0" @@ -8418,7 +7530,7 @@ postcss-svgo@^6.0.3: postcss-unique-selectors@^6.0.4: version "6.0.4" - resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-6.0.4.tgz#983ab308896b4bf3f2baaf2336e14e52c11a2088" + resolved "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-6.0.4.tgz" integrity sha512-K38OCaIrO8+PzpArzkLKB42dSARtC2tmG6PvD4b1o1Q2E9Os8jzfWFfSy/rixsHwohtsDdFtAWGjFVFUdwYaMg== dependencies: postcss-selector-parser "^6.0.16" @@ -8430,21 +7542,12 @@ postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: postcss-zindex@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-6.0.2.tgz#e498304b83a8b165755f53db40e2ea65a99b56e1" + resolved "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-6.0.2.tgz" integrity sha512-5BxW9l1evPB/4ZIc+2GobEBoKC+h8gPGCMi+jxsYvd2x0mjq7wazk6DrP71pStqxE9Foxh5TVnonbWpFZzXaYg== -postcss@^8.4.21, postcss@^8.4.26, postcss@^8.4.33: - version "8.4.36" - resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.36.tgz" - integrity sha512-/n7eumA6ZjFHAsbX30yhHup/IMkOmlmvtEi7P+6RMYf+bGJSUHc3geH4a0NSZxAz/RJfiS9tooCTs9LAVYUZKw== - dependencies: - nanoid "^3.3.7" - picocolors "^1.0.0" - source-map-js "^1.1.0" - -postcss@^8.4.24, postcss@^8.4.38: +"postcss@^7.0.0 || ^8.0.1", postcss@^8.0.9, postcss@^8.1.0, 
postcss@^8.2.2, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.24, postcss@^8.4.26, postcss@^8.4.31, postcss@^8.4.33, postcss@^8.4.38: version "8.4.38" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.38.tgz#b387d533baf2054288e337066d81c6bee9db9e0e" + resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz" integrity sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A== dependencies: nanoid "^3.3.7" @@ -8464,10 +7567,10 @@ pretty-time@^1.1.0: resolved "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz" integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA== -prism-react-renderer@^2.1.0, prism-react-renderer@^2.3.0: - version "2.3.1" - resolved "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-2.3.1.tgz" - integrity sha512-Rdf+HzBLR7KYjzpJ1rSoxT9ioO85nZngQEoFIhL07XhtJHlCU3SOz0GJ6+qvMyQe0Se+BV3qpe6Yd/NmQF5Juw== +prism-react-renderer@^2.3.0, prism-react-renderer@^2.4.0: + version "2.4.0" + resolved "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-2.4.0.tgz" + integrity sha512-327BsVCD/unU4CNLZTWVHyUHKnsqcvj2qbPlQ8MiBE2eq2rgctjigPA1Gp9HLF83kZ20zNN6jgizHJeEsyFYOw== dependencies: "@types/prismjs" "^1.26.0" clsx "^2.0.0" @@ -8534,12 +7637,12 @@ pupa@^3.1.0: dependencies: escape-goat "^4.0.0" -qs@6.11.0: - version "6.11.0" - resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== +qs@6.13.0: + version "6.13.0" + resolved "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== dependencies: - side-channel "^1.0.4" + side-channel "^1.0.6" queue-microtask@^1.2.2: version "1.2.3" @@ -8565,16 +7668,21 @@ randombytes@^2.1.0: dependencies: safe-buffer "^5.1.0" -range-parser@1.2.0: - version "1.2.0" - 
resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" - integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== +range-parser@^1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== -range-parser@^1.2.1, range-parser@~1.2.1: +range-parser@~1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== +range-parser@1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== + raw-body@2.5.2: version "2.5.2" resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz" @@ -8625,9 +7733,9 @@ react-dev-utils@^12.0.1: strip-ansi "^6.0.1" text-table "^0.2.0" -react-dom@^18.3.1: +react-dom@*, "react-dom@^16.14.0 || 17 || ^18", "react-dom@^16.6.0 || ^17.0.0 || ^18.0.0", react-dom@^18.0.0, react-dom@^18.3.1, "react-dom@>= 16.8.0 < 19.0.0": version "18.3.1" - resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.3.1.tgz#c2265d79511b57d479b3dd3fdfa51536494c5cb4" + resolved "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz" integrity sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw== dependencies: loose-envify "^1.1.0" @@ -8660,9 +7768,9 @@ react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0: integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== react-json-view-lite@^1.2.0: - version "1.3.0" - resolved "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.3.0.tgz" - integrity 
sha512-aN1biKC5v4DQkmQBlZjuMFR09MKZGMPtIg+cut8zEeg2HXd6gl2gRy0n4HMacHf0dznQgo0SVXN7eT8zV3hEuQ== + version "1.5.0" + resolved "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.5.0.tgz" + integrity sha512-nWqA1E4jKPklL2jvHWs6s+7Na0qNgw9HCP6xehdQJeg6nPBTFZgGwyko9Q0oj+jQWKTTVRS30u0toM5wiuL3iw== react-loadable-ssr-addon-v5-slorber@^1.0.1: version "1.0.1" @@ -8671,9 +7779,9 @@ react-loadable-ssr-addon-v5-slorber@^1.0.1: dependencies: "@babel/runtime" "^7.10.3" -"react-loadable@npm:@docusaurus/react-loadable@6.0.0": +react-loadable@*, "react-loadable@npm:@docusaurus/react-loadable@6.0.0": version "6.0.0" - resolved "https://registry.yarnpkg.com/@docusaurus/react-loadable/-/react-loadable-6.0.0.tgz#de6c7f73c96542bd70786b8e522d535d69069dc4" + resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-6.0.0.tgz" integrity sha512-YMMxTUQV/QFSnbgrP3tjDzLHRg7vsbMn8e9HAa8o/1iXoiomo48b7sk/kkmWEuWNDPJVlKSJRB6Y2fHqdJk+SQ== dependencies: "@types/react" "*" @@ -8698,7 +7806,7 @@ react-router-dom@^5.3.4: tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-router@5.3.4, react-router@^5.3.4: +react-router@^5.3.4, react-router@>=5, react-router@5.3.4: version "5.3.4" resolved "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz" integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== @@ -8713,9 +7821,9 @@ react-router@5.3.4, react-router@^5.3.4: tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react@^18.3.1: +react@*, "react@^16.13.1 || ^17.0.0 || ^18.0.0", "react@^16.14.0 || ^17 || ^18", "react@^16.6.0 || ^17.0.0 || ^18.0.0", react@^18.0.0, react@^18.3.1, "react@>= 16.8.0 < 19.0.0", react@>=15, react@>=16, react@>=16.0.0: version "18.3.1" - resolved "https://registry.yarnpkg.com/react/-/react-18.3.1.tgz#49ab892009c53933625bd16b2533fc754cab2891" + resolved "https://registry.npmjs.org/react/-/react-18.3.1.tgz" integrity 
sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ== dependencies: loose-envify "^1.1.0" @@ -9005,9 +8113,9 @@ rtl-detect@^1.0.4: integrity sha512-PGMBq03+TTG/p/cRB7HCLKJ1MgDIi07+QU1faSjiYRfmY5UsAttV9Hs08jDAHVwcOwmVLcSJkpwyfXszVjWfIQ== rtlcss@^4.1.0: - version "4.1.1" - resolved "https://registry.npmjs.org/rtlcss/-/rtlcss-4.1.1.tgz" - integrity sha512-/oVHgBtnPNcggP2aVXQjSy6N1mMAfHg4GSag0QtZBlD5bdDgAHwr4pydqJGd+SUCu9260+Pjqbjwtvu7EMH1KQ== + version "4.3.0" + resolved "https://registry.npmjs.org/rtlcss/-/rtlcss-4.3.0.tgz" + integrity sha512-FI+pHEn7Wc4NqKXMXFM+VAYKEj/mRIcW4h24YVwVtyjI+EqGrLc2Hx/Ny0lrZ21cBWU2goLy36eqMcNj3AQJig== dependencies: escalade "^3.1.1" picocolors "^1.0.0" @@ -9033,15 +8141,20 @@ sade@^1.7.3: dependencies: mri "^1.1.0" -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@^5.1.0, safe-buffer@>=5.1.0, safe-buffer@~5.2.0, safe-buffer@5.2.1: + version "5.2.1" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== +safe-buffer@5.1.2: + version "5.1.2" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": version "2.1.2" @@ -9049,27 +8162,36 @@ 
safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@^1.2.4: - version "1.3.0" - resolved "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz" - integrity sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA== + version "1.4.1" + resolved "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz" + integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== scheduler@^0.23.2: version "0.23.2" - resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.2.tgz#414ba64a3b282892e944cf2108ecc078d115cdc3" + resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz" integrity sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ== dependencies: loose-envify "^1.1.0" -schema-utils@2.7.0: - version "2.7.0" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" - integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== +schema-utils@^3.0.0: + version "3.3.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== dependencies: - "@types/json-schema" "^7.0.4" - ajv "^6.12.2" - ajv-keywords "^3.4.1" + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^3.1.1: + version "3.3.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" -schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: +schema-utils@^3.2.0: version "3.3.0" resolved 
"https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== @@ -9088,6 +8210,20 @@ schema-utils@^4.0.0, schema-utils@^4.0.1: ajv-formats "^2.1.1" ajv-keywords "^5.1.0" +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +"search-insights@>= 1 < 3": + version "2.17.2" + resolved "https://registry.npmjs.org/search-insights/-/search-insights-2.17.2.tgz" + integrity sha512-zFNpOpUO+tY2D85KrxJ+aqwnIfdEGi06UH2+xEb+Bp9Mwznmauqc9djbnBibJO5mpfUPPa8st6Sx65+vbeO45g== + section-matter@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz" @@ -9128,10 +8264,10 @@ semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.5.4: dependencies: lru-cache "^6.0.0" -send@0.18.0: - version "0.18.0" - resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== +send@0.19.0: + version "0.19.0" + resolved "https://registry.npmjs.org/send/-/send-0.19.0.tgz" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== dependencies: debug "2.6.9" depd "2.0.0" @@ -9181,15 +8317,15 @@ serve-index@^1.9.1: mime-types "~2.1.17" parseurl "~1.3.2" -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz" + integrity 
sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== dependencies: - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.18.0" + send "0.19.0" set-function-length@^1.2.1: version "1.2.2" @@ -9251,7 +8387,7 @@ shelljs@^0.8.5: interpret "^1.0.0" rechoir "^0.6.2" -side-channel@^1.0.4: +side-channel@^1.0.6: version "1.0.6" resolved "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz" integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== @@ -9281,9 +8417,9 @@ sisteransi@^1.0.5: integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== sitemap@^7.1.1: - version "7.1.1" - resolved "https://registry.npmjs.org/sitemap/-/sitemap-7.1.1.tgz" - integrity sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg== + version "7.1.2" + resolved "https://registry.npmjs.org/sitemap/-/sitemap-7.1.2.tgz" + integrity sha512-ARCqzHJ0p4gWt+j7NlU5eDlIO9+Rkr/JhPFZKKQ1l5GCus7rJH4UdrlVAh0xC/gDS/Qir2UMxqYNHtsKr2rpCw== dependencies: "@types/node" "^17.0.5" "@types/sax" "^1.2.1" @@ -9309,7 +8445,7 @@ slash@^4.0.0: snake-case@^3.0.4: version "3.0.4" - resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c" + resolved "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz" integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg== dependencies: dot-case "^3.0.4" @@ -9326,19 +8462,14 @@ sockjs@^0.3.24: sort-css-media-queries@2.2.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-2.2.0.tgz#aa33cf4a08e0225059448b6c40eddbf9f1c8334c" + resolved "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.2.0.tgz" integrity 
sha512-0xtkGhWCC9MGt/EzgnvbbbKhqWjl1+/rncmhTh5qCpbYguXh6S/qwePfv/JQ8jePXXmqingylxoC49pCkSPIbA== source-map-js@^1.0.1, source-map-js@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af" + resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz" integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg== -source-map-js@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.1.0.tgz" - integrity sha512-9vC2SfsJzlej6MAaMPLu8HiBSHGdRAJ9hVFYN1ibZoNkeanmDmLUcIrj6G9DGL7XMJ54AKg/G75akXl1/izTOw== - source-map-support@~0.5.20: version "0.5.21" resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" @@ -9347,7 +8478,7 @@ source-map-support@~0.5.20: buffer-from "^1.0.0" source-map "^0.6.0" -source-map@^0.6.0, source-map@~0.6.0: +source-map@^0.6.0: version "0.6.1" resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== @@ -9357,6 +8488,11 @@ source-map@^0.7.0: resolved "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz" integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== +source-map@~0.6.0: + version "0.6.1" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + space-separated-tokens@^2.0.0: version "2.0.2" resolved "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz" @@ -9395,22 +8531,45 @@ srcset@^4.0.0: resolved "https://registry.npmjs.org/srcset/-/srcset-4.0.0.tgz" integrity sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw== -statuses@2.0.1: - version 
"2.0.1" - resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - "statuses@>= 1.4.0 < 2": version "1.5.0" resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + std-env@^3.0.1: version "3.7.0" resolved "https://registry.npmjs.org/std-env/-/std-env-3.7.0.tgz" integrity sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg== -string-width@^4.1.0, string-width@^4.2.0: +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +string-width@^4.1.0: + version "4.2.3" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.2.0: version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -9428,20 +8587,6 @@ string-width@^5.0.1, 
string-width@^5.1.2: emoji-regex "^9.2.2" strip-ansi "^7.0.1" -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - stringify-entities@^4.0.0: version "4.0.3" resolved "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.3.tgz" @@ -9509,7 +8654,7 @@ style-to-object@^1.0.0: stylehacks@^6.1.1: version "6.1.1" - resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-6.1.1.tgz#543f91c10d17d00a440430362d419f79c25545a6" + resolved "https://registry.npmjs.org/stylehacks/-/stylehacks-6.1.1.tgz" integrity sha512-gSTTEQ670cJNoaeIp9KX6lZmm8LJ3jPB5yJmX8Zq/wQxOsAFXV3qjWzHas3YYk1qesuVIyYWWUpZ0vSE/dTSGg== dependencies: browserslist "^4.23.0" @@ -9553,7 +8698,7 @@ svg-parser@^2.0.4: svgo@^3.0.2, svgo@^3.2.0: version "3.3.2" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-3.3.2.tgz#ad58002652dffbb5986fc9716afe52d869ecbda8" + resolved "https://registry.npmjs.org/svgo/-/svgo-3.3.2.tgz" integrity sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw== dependencies: "@trysound/sax" "0.2.0" @@ -9652,16 +8797,11 @@ ts-dedent@^2.2.0: resolved "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz" integrity sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ== -tslib@^2.0.3, tslib@^2.6.0: +tslib@^2.0.3, tslib@^2.4.0, tslib@^2.6.0: version "2.6.2" resolved "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz" integrity 
sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== -tslib@^2.4.0: - version "2.6.3" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0" - integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ== - type-fest@^1.0.1: version "1.4.0" resolved "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz" @@ -9687,11 +8827,21 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" +"typescript@>= 2.7", typescript@>=4.9.5: + version "5.6.2" + resolved "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz" + integrity sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw== + undici-types@~5.26.4: version "5.26.5" resolved "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +undici@^6.19.5: + version "6.19.8" + resolved "https://registry.npmjs.org/undici/-/undici-6.19.8.tgz" + integrity sha512-U8uCCl2x9TK3WANvmBavymRzxbfFYG+tAu+fgx3zxQy3qdagQqBLwJVrdyO1TBfUXvfKveMKJZhpvUYoOjM+4g== + unicode-canonical-property-names-ecmascript@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" @@ -9805,7 +8955,7 @@ universalify@^2.0.0: resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz" integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== -unpipe@1.0.0, unpipe@~1.0.0: +unpipe@~1.0.0, unpipe@1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== @@ -9931,7 +9081,7 @@ vfile@^6.0.0, vfile@^6.0.1: watchpack@^2.4.1: version "2.4.2" - resolved 
"https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + resolved "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz" integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" @@ -10034,9 +9184,9 @@ webpack-sources@^3.2.3: resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== -webpack@^5.88.1: +"webpack@^4.0.0 || ^5.0.0", "webpack@^4.37.0 || ^5.0.0", webpack@^5.0.0, webpack@^5.1.0, webpack@^5.20.0, webpack@^5.88.1, "webpack@>= 4", "webpack@>=4.41.1 || 5.x", webpack@>=5, "webpack@3 || 4 || 5": version "5.94.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" + resolved "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz" integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== dependencies: "@types/estree" "^1.0.5" @@ -10073,7 +9223,7 @@ webpackbar@^5.0.2: pretty-time "^1.1.0" std-env "^3.0.1" -websocket-driver@>=0.5.1, websocket-driver@^0.7.4: +websocket-driver@^0.7.4, websocket-driver@>=0.5.1: version "0.7.4" resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== @@ -10087,6 +9237,18 @@ websocket-extensions@>=0.1.1: resolved "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== +whatwg-encoding@^3.1.1: + version "3.1.1" + resolved "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz" + integrity sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ== + 
dependencies: + iconv-lite "0.6.3" + +whatwg-mimetype@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz" + integrity sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg== + which@^1.3.1: version "1.3.1" resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" @@ -10139,12 +9301,12 @@ write-file-atomic@^3.0.3: ws@^7.3.1: version "7.5.10" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" + resolved "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz" integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== ws@^8.13.0: version "8.17.1" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b" + resolved "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz" integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ== xdg-basedir@^5.0.1, xdg-basedir@^5.1.0: