diff --git a/.idea/.idea.LexBox/.idea/indexLayout.xml b/.idea/.idea.LexBox/.idea/indexLayout.xml
index 24b63bcb7..8e8320cf9 100644
--- a/.idea/.idea.LexBox/.idea/indexLayout.xml
+++ b/.idea/.idea.LexBox/.idea/indexLayout.xml
@@ -18,6 +18,8 @@
frontend/.svelte-kit/output
+ frontend/build
+ frontend/viewer/dist-web-component
-
+
\ No newline at end of file
diff --git a/backend/LcmCrdt/LcmCrdtKernel.cs b/backend/LcmCrdt/LcmCrdtKernel.cs
index 631cd529a..c143ce732 100644
--- a/backend/LcmCrdt/LcmCrdtKernel.cs
+++ b/backend/LcmCrdt/LcmCrdtKernel.cs
@@ -26,6 +26,7 @@ public static class LcmCrdtKernel
public static IServiceCollection AddLcmCrdtClient(this IServiceCollection services)
{
LinqToDBForEFTools.Initialize();
+ services.AddMemoryCache();
services.AddCrdtData(
ConfigureDbOptions,
diff --git a/backend/LexBoxApi/Services/CrdtSyncRoutes.cs b/backend/LexBoxApi/Services/CrdtSyncRoutes.cs
index 57f51859d..c3bb92f1c 100644
--- a/backend/LexBoxApi/Services/CrdtSyncRoutes.cs
+++ b/backend/LexBoxApi/Services/CrdtSyncRoutes.cs
@@ -15,10 +15,10 @@ public static IEndpointConventionBuilder MapSyncApi(this IEndpointRouteBuilder e
group.MapGet("/get",
async (Guid id, LexBoxDbContext dbContext) =>
{
- return await dbContext.Set<CrdtCommit>().Where(c => c.ProjectId == id).GetSyncState();
+ return await dbContext.Set<ServerCommit>().Where(c => c.ProjectId == id).GetSyncState();
});
group.MapPost("/add",
- async (Guid id, CrdtCommit[] commits, LexBoxDbContext dbContext) =>
+ async (Guid id, ServerCommit[] commits, LexBoxDbContext dbContext) =>
{
foreach (var commit in commits)
{
@@ -31,8 +31,8 @@ public static IEndpointConventionBuilder MapSyncApi(this IEndpointRouteBuilder e
group.MapPost("/changes",
async (Guid id, SyncState clientHeads, LexBoxDbContext dbContext) =>
{
- var commits = dbContext.Set<CrdtCommit>().Where(c => c.ProjectId == id);
- return await commits.GetChanges(clientHeads);
+ var commits = dbContext.Set<ServerCommit>().Where(c => c.ProjectId == id);
+ return await commits.GetChanges(clientHeads);
});
return group;
diff --git a/backend/LexBoxApi/Services/HgService.cs b/backend/LexBoxApi/Services/HgService.cs
index 7d9ffd5d9..0b4288650 100644
--- a/backend/LexBoxApi/Services/HgService.cs
+++ b/backend/LexBoxApi/Services/HgService.cs
@@ -18,10 +18,12 @@
namespace LexBoxApi.Services;
-public partial class HgService : IHgService, IHostedService
+public class HgService : IHgService, IHostedService
{
- private const string DELETED_REPO_FOLDER = "_____deleted_____";
- private const string TEMP_REPO_FOLDER = "_____temp_____";
+ private const string DELETED_REPO_FOLDER = ProjectCode.DELETED_REPO_FOLDER;
+ private const string TEMP_REPO_FOLDER = ProjectCode.TEMP_REPO_FOLDER;
+
+ private const string AllZeroHash = "0000000000000000000000000000000000000000";
private readonly IOptions<HgConfig> _options;
private readonly Lazy<HttpClient> _hgClient;
@@ -34,17 +36,12 @@ public HgService(IOptions options, IHttpClientFactory clientFactory, I
_hgClient = new(() => clientFactory.CreateClient("HgWeb"));
}
- [GeneratedRegex(Project.ProjectCodeRegex)]
- private static partial Regex ProjectCodeRegex();
-
- public static string PrefixRepoRequestPath(string code) => $"{code[0]}/{code}";
- private string PrefixRepoFilePath(string code) => Path.Combine(_options.Value.RepoPath, code[0].ToString(), code);
- private string GetTempRepoPath(string code, string reason) => Path.Combine(_options.Value.RepoPath, TEMP_REPO_FOLDER, $"{code}__{reason}__{FileUtils.ToTimestamp(DateTimeOffset.UtcNow)}");
+ public static string PrefixRepoRequestPath(ProjectCode code) => $"{code.Value[0]}/{code}";
+ private string PrefixRepoFilePath(ProjectCode code) => Path.Combine(_options.Value.RepoPath, code.Value[0].ToString(), code.Value);
+ private string GetTempRepoPath(ProjectCode code, string reason) => Path.Combine(_options.Value.RepoPath, TEMP_REPO_FOLDER, $"{code}__{reason}__{FileUtils.ToTimestamp(DateTimeOffset.UtcNow)}");
- private async Task<HttpResponseMessage> GetResponseMessage(string code, string requestPath)
+ private async Task<HttpResponseMessage> GetResponseMessage(ProjectCode code, string requestPath)
{
- if (!ProjectCodeRegex().IsMatch(code))
- throw new ArgumentException($"Invalid project code: {code}.");
var client = _hgClient.Value;
var urlPrefix = DetermineProjectUrlPrefix(HgType.hgWeb, _options.Value);
@@ -58,15 +55,16 @@ private async Task GetResponseMessage(string code, string r
/// Note: The repo is unstable and potentially unavailable for a short while after creation, so don't read from it right away.
/// See: https://github.com/sillsdev/languageforge-lexbox/issues/173#issuecomment-1665478630
///
- public async Task InitRepo(string code)
+ public async Task InitRepo(ProjectCode code)
{
- AssertIsSafeRepoName(code);
if (Directory.Exists(PrefixRepoFilePath(code)))
throw new AlreadyExistsException($"Repo already exists: {code}.");
await Task.Run(() =>
{
InitRepoAt(new DirectoryInfo(PrefixRepoFilePath(code)));
});
+ await InvalidateDirCache(code);
+ await WaitForRepoEmptyState(code, RepoEmptyState.Empty);
}
private void InitRepoAt(DirectoryInfo repoDirectory)
@@ -79,12 +77,12 @@ private void InitRepoAt(DirectoryInfo repoDirectory)
);
}
- public async Task DeleteRepo(string code)
+ public async Task DeleteRepo(ProjectCode code)
{
await Task.Run(() => Directory.Delete(PrefixRepoFilePath(code), true));
}
- public BackupExecutor? BackupRepo(string code)
+ public BackupExecutor? BackupRepo(ProjectCode code)
{
string repoPath = PrefixRepoFilePath(code);
if (!Directory.Exists(repoPath))
@@ -97,16 +95,18 @@ public async Task DeleteRepo(string code)
}, token));
}
- public async Task ResetRepo(string code)
+ public async Task ResetRepo(ProjectCode code)
{
var tmpRepo = new DirectoryInfo(GetTempRepoPath(code, "reset"));
InitRepoAt(tmpRepo);
await SoftDeleteRepo(code, $"{FileUtils.ToTimestamp(DateTimeOffset.UtcNow)}__reset");
//we must init the repo as uploading a zip is optional
tmpRepo.MoveTo(PrefixRepoFilePath(code));
+ await InvalidateDirCache(code);
+ await WaitForRepoEmptyState(code, RepoEmptyState.Empty);
}
- public async Task FinishReset(string code, Stream zipFile)
+ public async Task FinishReset(ProjectCode code, Stream zipFile)
{
var tempRepoPath = GetTempRepoPath(code, "upload");
var tempRepo = Directory.CreateDirectory(tempRepoPath);
@@ -137,6 +137,11 @@ await Task.Run(() =>
// Now we're ready to move the new repo into place, replacing the old one
await DeleteRepo(code);
tempRepo.MoveTo(PrefixRepoFilePath(code));
+ await InvalidateDirCache(code);
+ // If someone uploaded an *empty* repo, we don't want to wait forever for a non-empty state
+ var changelogPath = Path.Join(PrefixRepoFilePath(code), ".hg", "store", "00changelog.i");
+ var expectedState = File.Exists(changelogPath) ? RepoEmptyState.NonEmpty : RepoEmptyState.Empty;
+ await WaitForRepoEmptyState(code, expectedState);
}
///
@@ -156,7 +161,7 @@ await Task.Run(() =>
}
- public Task RevertRepo(string code, string revHash)
+ public Task RevertRepo(ProjectCode code, string revHash)
{
throw new NotImplementedException();
// Steps:
@@ -168,7 +173,7 @@ public Task RevertRepo(string code, string revHash)
// Will need an SSH key as a k8s secret, put it into authorized_keys on the hgweb side so that lexbox can do "ssh hgweb hg clone ..."
}
- public async Task SoftDeleteRepo(string code, string deletedRepoSuffix)
+ public async Task SoftDeleteRepo(ProjectCode code, string deletedRepoSuffix)
{
var deletedRepoName = $"{code}__{deletedRepoSuffix}";
await Task.Run(() =>
@@ -205,12 +210,12 @@ private static void SetPermissionsRecursively(DirectoryInfo rootDir)
}
}
- public bool HasAbandonedTransactions(string projectCode)
+ public bool HasAbandonedTransactions(ProjectCode projectCode)
{
return Path.Exists(Path.Combine(PrefixRepoFilePath(projectCode), ".hg", "store", "journal"));
}
- public bool RepoIsLocked(string projectCode)
+ public bool RepoIsLocked(ProjectCode projectCode)
{
return Path.Exists(Path.Combine(PrefixRepoFilePath(projectCode), ".hg", "store", "lock"));
}
@@ -221,7 +226,7 @@ public bool RepoIsLocked(string projectCode)
return json?["entries"]?.AsArray().FirstOrDefault()?["node"].Deserialize<string>();
}
- public async Task GetLastCommitTimeFromHg(string projectCode)
+ public async Task GetLastCommitTimeFromHg(ProjectCode projectCode)
{
var json = await GetCommit(projectCode, "tip");
//format is this: [1678687688, offset] offset is
@@ -236,13 +241,13 @@ public bool RepoIsLocked(string projectCode)
return date.ToUniversalTime();
}
- private async Task GetCommit(string projectCode, string rev)
+ private async Task GetCommit(ProjectCode projectCode, string rev)
{
var response = await GetResponseMessage(projectCode, $"log?style=json-lex&rev={rev}");
return await response.Content.ReadFromJsonAsync();
}
- public async Task GetChangesets(string projectCode)
+ public async Task GetChangesets(ProjectCode projectCode)
{
var response = await GetResponseMessage(projectCode, "log?style=json-lex");
var logResponse = await response.Content.ReadFromJsonAsync();
@@ -250,11 +255,11 @@ public async Task GetChangesets(string projectCode)
}
- public Task VerifyRepo(string code, CancellationToken token)
+ public Task VerifyRepo(ProjectCode code, CancellationToken token)
{
return ExecuteHgCommandServerCommand(code, "verify", token);
}
- public async Task ExecuteHgRecover(string code, CancellationToken token)
+ public async Task ExecuteHgRecover(ProjectCode code, CancellationToken token)
{
var response = await ExecuteHgCommandServerCommand(code, "recover", token);
// Can't do this with a streamed response, unfortunately. Will have to do it client-side.
@@ -262,7 +267,59 @@ public async Task ExecuteHgRecover(string code, CancellationToken t
return response;
}
- public async Task GetLexEntryCount(string code, ProjectType projectType)
+ public Task InvalidateDirCache(ProjectCode code, CancellationToken token = default)
+ {
+ var repoPath = Path.Join(PrefixRepoFilePath(code));
+ if (Directory.Exists(repoPath))
+ {
+ // Invalidate NFS directory cache by forcing a write and re-read of the repo directory
+ var randomPath = Path.Join(repoPath, Path.GetRandomFileName());
+ while (File.Exists(randomPath) || Directory.Exists(randomPath)) { randomPath = Path.Join(repoPath, Path.GetRandomFileName()); }
+ try
+ {
+ // Create and delete a directory since that's slightly safer than a file
+ var d = Directory.CreateDirectory(randomPath);
+ d.Delete();
+ }
+ catch (Exception) { }
+ }
+ var result = ExecuteHgCommandServerCommand(code, "invalidatedircache", token);
+ return result;
+ }
+
+ public async Task<string> GetTipHash(ProjectCode code, CancellationToken token = default)
+ {
+ var content = await ExecuteHgCommandServerCommand(code, "tip", token);
+ return await content.ReadAsStringAsync();
+ }
+
+ private async Task WaitForRepoEmptyState(ProjectCode code, RepoEmptyState expectedState, int timeoutMs = 30_000, CancellationToken token = default)
+ {
+ // Set timeout so unforeseen errors can't cause an infinite loop
+ using var timeoutSource = CancellationTokenSource.CreateLinkedTokenSource(token);
+ timeoutSource.CancelAfter(timeoutMs);
+ var done = false;
+ try
+ {
+ while (!done && !timeoutSource.IsCancellationRequested)
+ {
+ var hash = await GetTipHash(code, timeoutSource.Token);
+ var isEmpty = hash == AllZeroHash;
+ done = expectedState switch
+ {
+ RepoEmptyState.Empty => isEmpty,
+ RepoEmptyState.NonEmpty => !isEmpty
+ };
+ if (!done) await Task.Delay(2500, timeoutSource.Token);
+ }
+ }
+ // We don't want to actually throw if we hit the timeout, because the operation *will* succeed eventually
+ // once the NFS caches synchronize, so we don't want to propagate an error message to the end user. So
+ // even if the timeout is hit, return as if we succeeded.
+ catch (OperationCanceledException) { }
+ }
+
+ public async Task GetLexEntryCount(ProjectCode code, ProjectType projectType)
{
var command = projectType switch
{
@@ -283,7 +340,7 @@ public async Task HgCommandHealth()
return version.Trim();
}
- private async Task<HttpContent> ExecuteHgCommandServerCommand(string code, string command, CancellationToken token)
+ private async Task<HttpContent> ExecuteHgCommandServerCommand(ProjectCode code, string command, CancellationToken token)
{
var httpClient = _hgClient.Value;
var baseUri = _options.Value.HgCommandServer;
@@ -292,18 +349,7 @@ private async Task ExecuteHgCommandServerCommand(string code, strin
return response.Content;
}
- private static readonly string[] SpecialDirectoryNames = [DELETED_REPO_FOLDER, TEMP_REPO_FOLDER];
- private static readonly HashSet<string> InvalidRepoNames = [.. SpecialDirectoryNames, "api"];
-
- private void AssertIsSafeRepoName(string name)
- {
- if (InvalidRepoNames.Contains(name, StringComparer.OrdinalIgnoreCase))
- throw new ArgumentException($"Invalid repo name: {name}.");
- if (!ProjectCodeRegex().IsMatch(name))
- throw new ArgumentException($"Invalid repo name: {name}.");
- }
-
- public async Task DetermineProjectType(string projectCode)
+ public async Task DetermineProjectType(ProjectCode projectCode)
{
var response = await GetResponseMessage(projectCode, "file/tip?style=json-lex");
var parsed = await response.Content.ReadFromJsonAsync();
@@ -370,7 +416,7 @@ public static string DetermineProjectUrlPrefix(HgType type, HgConfig hgConfig)
public Task StartAsync(CancellationToken cancellationToken)
{
- var repoContainerDirectories = SpecialDirectoryNames
+ var repoContainerDirectories = ProjectCode.SpecialDirectoryNames
.Concat(Enumerable.Range('a', 'z' - 'a' + 1).Select(c => ((char)c).ToString()))
.Concat(Enumerable.Range(0, 10).Select(c => c.ToString()));
@@ -408,3 +454,9 @@ public class BrowseResponse
{
public BrowseFilesResponse[]? Files { get; set; }
}
+
+public enum RepoEmptyState
+{
+ Empty,
+ NonEmpty
+}
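Note (not part of the diff): the new WaitForRepoEmptyState polling relies on Mercurial reporting an all-zero changeset hash for a repository that has no commits, so "empty vs. non-empty" can be decided from the tip hash alone. A minimal sketch of that convention, assuming the hg "tip" command returns the 40-character hash as plain text:

    // Hypothetical helper, for illustration only; it mirrors the AllZeroHash comparison added above.
    const string AllZeroHash = "0000000000000000000000000000000000000000";
    bool IsEmptyRepo(string tipHash) => tipHash.Trim() == AllZeroHash;
    Console.WriteLine(IsEmptyRepo("0000000000000000000000000000000000000000")); // True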
diff --git a/backend/LexBoxApi/Services/ProjectService.cs b/backend/LexBoxApi/Services/ProjectService.cs
index db8a3c0f8..5e351fc38 100644
--- a/backend/LexBoxApi/Services/ProjectService.cs
+++ b/backend/LexBoxApi/Services/ProjectService.cs
@@ -117,6 +117,10 @@ public async Task FinishReset(string code, Stream? zipFile = null)
await hgService.FinishReset(code, zipFile);
await UpdateProjectMetadata(project);
}
+ else
+ {
+ await hgService.InvalidateDirCache(code);
+ }
project.ResetStatus = ResetStatus.None;
project.UpdateUpdatedDate();
await dbContext.SaveChangesAsync();
@@ -146,6 +150,10 @@ public async Task UpdateProjectMetadata(Project project)
project.FlexProjectMetadata.LexEntryCount = count;
}
}
+ else
+ {
+ await hgService.InvalidateDirCache(project.Code);
+ }
project.LastCommit = await hgService.GetLastCommitTimeFromHg(project.Code);
// Caller is responsible for calling dbContext.SaveChangesAsync()
diff --git a/backend/LexBoxApi/Services/TurnstileService.cs b/backend/LexBoxApi/Services/TurnstileService.cs
index 4293b904c..b15d23e66 100644
--- a/backend/LexBoxApi/Services/TurnstileService.cs
+++ b/backend/LexBoxApi/Services/TurnstileService.cs
@@ -6,34 +6,26 @@
namespace LexBoxApi.Services;
-public class TurnstileService
+public class TurnstileService(IHttpClientFactory httpClientFactory, IOptionsSnapshot options)
{
- private readonly IHttpClientFactory _httpClientFactory;
- private readonly IOptionsSnapshot _options;
-
- public TurnstileService(IHttpClientFactory httpClientFactory, IOptionsSnapshot options)
- {
- _httpClientFactory = httpClientFactory;
- _options = options;
- }
-
public async Task<bool> IsTokenValid(string token, string? email = null)
{
if (email is not null)
{
- var allowDomain = _options.Value.AllowDomain;
+ var allowDomain = options.Value.AllowDomain;
if (!allowDomain.IsNullOrEmpty() && email.EndsWith($"@{allowDomain}"))
{
return true;
}
}
- var httpClient = _httpClientFactory.CreateClient("cloudflare");
- var data = new StringContent(
- $"secret={_options.Value.TurnstileKey}&response={token}",
- Encoding.UTF8,
- "application/x-www-form-urlencoded"
- );
+ var httpClient = httpClientFactory.CreateClient("cloudflare");
+
+
+ var data = new FormUrlEncodedContent(new Dictionary<string, string>
+ {
+ { "secret", options.Value.TurnstileKey }, { "response", token }
+ });
var response = await httpClient.PostAsync("https://challenges.cloudflare.com/turnstile/v0/siteverify", data);
var responseJson = await response.Content.ReadFromJsonAsync<JsonDocument>();
var success = (responseJson?.RootElement.TryGetProperty("success"u8, out var prop) ?? false) && prop.GetBoolean();
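Note (not part of the diff): FormUrlEncodedContent escapes each key and value, which the previous hand-built StringContent did not, so a secret or token containing '&' or '=' can no longer corrupt the form body. A stand-alone sketch with fabricated values:

    // Illustration only; the values are made up.
    var body = new FormUrlEncodedContent(new Dictionary<string, string>
    {
        { "secret", "k&y=1" },
        { "response", "token value" },
    });
    Console.WriteLine(await body.ReadAsStringAsync()); // secret=k%26y%3D1&response=token+value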
diff --git a/backend/LexCore/Entities/ProjectCode.cs b/backend/LexCore/Entities/ProjectCode.cs
new file mode 100644
index 000000000..25153eeec
--- /dev/null
+++ b/backend/LexCore/Entities/ProjectCode.cs
@@ -0,0 +1,43 @@
+using System.Text.RegularExpressions;
+
+namespace LexCore.Entities;
+
+public readonly partial record struct ProjectCode
+{
+ public ProjectCode()
+ {
+ throw new NotSupportedException("Default constructor is not supported.");
+ }
+
+ public ProjectCode(string value)
+ {
+ AssertIsSafeRepoName(value);
+ Value = value;
+ }
+
+ public string Value { get; }
+ public static implicit operator ProjectCode(string code) => new(code);
+
+ public override string ToString()
+ {
+ return Value;
+ }
+
+ public const string DELETED_REPO_FOLDER = "_____deleted_____";
+ public const string TEMP_REPO_FOLDER = "_____temp_____";
+ public static readonly string[] SpecialDirectoryNames = [DELETED_REPO_FOLDER, TEMP_REPO_FOLDER];
+
+ private static readonly HashSet<string> InvalidRepoNames =
+ new([.. SpecialDirectoryNames, "api"], StringComparer.OrdinalIgnoreCase);
+
+ private void AssertIsSafeRepoName(string name)
+ {
+ if (InvalidRepoNames.Contains(name))
+ throw new ArgumentException($"Invalid repo name: {name}.");
+ if (!ProjectCodeRegex().IsMatch(name))
+ throw new ArgumentException($"Invalid repo name: {name}.");
+ }
+
+ [GeneratedRegex(Project.ProjectCodeRegex)]
+ private static partial Regex ProjectCodeRegex();
+}
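Note (not part of the diff): a minimal usage sketch for the new ProjectCode value type; the codes shown are made up. Validation runs in the constructor (and therefore in the implicit string conversion), so an unsafe code can never reach the path-building helpers in HgService.

    ProjectCode code = "sena-3";               // implicit conversion from string runs AssertIsSafeRepoName
    Console.WriteLine(code.Value[0]);          // 's' – the shard character used by PrefixRepoFilePath
    try { ProjectCode bad = "../hacker"; }     // rejected by the regex check: ArgumentException
    catch (ArgumentException) { /* expected */ }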
diff --git a/backend/LexCore/ServiceInterfaces/IHgService.cs b/backend/LexCore/ServiceInterfaces/IHgService.cs
index a5ddadd26..60876ea06 100644
--- a/backend/LexCore/ServiceInterfaces/IHgService.cs
+++ b/backend/LexCore/ServiceInterfaces/IHgService.cs
@@ -5,19 +5,21 @@ namespace LexCore.ServiceInterfaces;
public record BackupExecutor(Func ExecuteBackup);
public interface IHgService
{
- Task InitRepo(string code);
- Task GetLastCommitTimeFromHg(string projectCode);
- Task GetChangesets(string projectCode);
- Task DetermineProjectType(string projectCode);
- Task DeleteRepo(string code);
- Task SoftDeleteRepo(string code, string deletedRepoSuffix);
- BackupExecutor? BackupRepo(string code);
- Task ResetRepo(string code);
- Task FinishReset(string code, Stream zipFile);
- Task VerifyRepo(string code, CancellationToken token);
- Task GetLexEntryCount(string code, ProjectType projectType);
+ Task InitRepo(ProjectCode code);
+ Task GetLastCommitTimeFromHg(ProjectCode projectCode);
+ Task GetChangesets(ProjectCode projectCode);
+ Task DetermineProjectType(ProjectCode projectCode);
+ Task DeleteRepo(ProjectCode code);
+ Task SoftDeleteRepo(ProjectCode code, string deletedRepoSuffix);
+ BackupExecutor? BackupRepo(ProjectCode code);
+ Task ResetRepo(ProjectCode code);
+ Task FinishReset(ProjectCode code, Stream zipFile);
+ Task VerifyRepo(ProjectCode code, CancellationToken token);
+ Task<string> GetTipHash(ProjectCode code, CancellationToken token = default);
+ Task GetLexEntryCount(ProjectCode code, ProjectType projectType);
Task GetRepositoryIdentifier(Project project);
- Task ExecuteHgRecover(string code, CancellationToken token);
- bool HasAbandonedTransactions(string projectCode);
+ Task ExecuteHgRecover(ProjectCode code, CancellationToken token);
+ Task InvalidateDirCache(ProjectCode code, CancellationToken token = default);
+ bool HasAbandonedTransactions(ProjectCode projectCode);
Task HgCommandHealth();
}
diff --git a/backend/LexData/Entities/CommitEntityConfiguration.cs b/backend/LexData/Entities/CommitEntityConfiguration.cs
index e0cdb220d..d40b04341 100644
--- a/backend/LexData/Entities/CommitEntityConfiguration.cs
+++ b/backend/LexData/Entities/CommitEntityConfiguration.cs
@@ -9,9 +9,9 @@
namespace LexData.Entities;
-public class CommitEntityConfiguration : IEntityTypeConfiguration<CrdtCommit>
+public class CommitEntityConfiguration : IEntityTypeConfiguration<ServerCommit>
{
- public void Configure(EntityTypeBuilder<CrdtCommit> builder)
+ public void Configure(EntityTypeBuilder<ServerCommit> builder)
{
builder.ToTable("CrdtCommits");
builder.HasKey(c => c.Id);
@@ -30,41 +30,7 @@ public void Configure(EntityTypeBuilder builder)
));
}
- private static JsonChange Deserialize(string s) => JsonSerializer.Deserialize<JsonChange>(s)!;
+ private static ServerJsonChange Deserialize(string s) => JsonSerializer.Deserialize<ServerJsonChange>(s)!;
- private static string Serialize(JsonChange c) => JsonSerializer.Serialize(c);
-}
-
-public class CrdtCommit : CommitBase
-{
- [JsonConstructor]
- protected CrdtCommit(Guid id, string hash, string parentHash, HybridDateTime hybridDateTime) : base(id,
- hash,
- parentHash,
- hybridDateTime)
- {
- }
-
- public CrdtCommit(Guid id) : base(id)
- {
- }
-
- public CrdtCommit()
- {
- }
-
- public Guid ProjectId { get; set; }
-}
-
-public class JsonChange
-{
- [JsonPropertyName("$type"), JsonPropertyOrder(1)]
- public required string Type { get; set; }
-
- [JsonExtensionData, JsonPropertyOrder(2)]
- public Dictionary? ExtensionData { get; set; }
-
- public static implicit operator JsonChange(JsonElement e) =>
- e.Deserialize<JsonChange>() ??
- throw new SerializationException("Failed to deserialize JSON change");
+ private static string Serialize(ServerJsonChange c) => JsonSerializer.Serialize(c);
}
diff --git a/backend/LexData/Migrations/LexBoxDbContextModelSnapshot.cs b/backend/LexData/Migrations/LexBoxDbContextModelSnapshot.cs
index 77c540a3e..0b5f4e5e6 100644
--- a/backend/LexData/Migrations/LexBoxDbContextModelSnapshot.cs
+++ b/backend/LexData/Migrations/LexBoxDbContextModelSnapshot.cs
@@ -467,6 +467,48 @@ protected override void BuildModel(ModelBuilder modelBuilder)
b.ToTable("qrtz_triggers", "quartz");
});
+ modelBuilder.Entity("Crdt.Core.ServerCommit", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("ClientId")
+ .HasColumnType("uuid");
+
+ b.Property("Hash")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("Metadata")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("ParentHash")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("ProjectId")
+ .HasColumnType("uuid");
+
+ b.ComplexProperty>("HybridDateTime", "Crdt.Core.ServerCommit.HybridDateTime#HybridDateTime", b1 =>
+ {
+ b1.IsRequired();
+
+ b1.Property("Counter")
+ .HasColumnType("bigint");
+
+ b1.Property("DateTime")
+ .HasColumnType("timestamp with time zone");
+ });
+
+ b.HasKey("Id");
+
+ b.HasIndex("ProjectId");
+
+ b.ToTable("CrdtCommits", (string)null);
+ });
+
modelBuilder.Entity("LexCore.Entities.DraftProject", b =>
{
b.Property("Id")
@@ -981,48 +1023,6 @@ protected override void BuildModel(ModelBuilder modelBuilder)
b.ToTable("OpenIddictTokens", (string)null);
});
- modelBuilder.Entity("LexData.Entities.CrdtCommit", b =>
- {
- b.Property("Id")
- .ValueGeneratedOnAdd()
- .HasColumnType("uuid");
-
- b.Property("ClientId")
- .HasColumnType("uuid");
-
- b.Property("Hash")
- .IsRequired()
- .HasColumnType("text");
-
- b.Property("Metadata")
- .IsRequired()
- .HasColumnType("text");
-
- b.Property("ParentHash")
- .IsRequired()
- .HasColumnType("text");
-
- b.Property("ProjectId")
- .HasColumnType("uuid");
-
- b.ComplexProperty>("HybridDateTime", "LexData.Entities.CrdtCommit.HybridDateTime#HybridDateTime", b1 =>
- {
- b1.IsRequired();
-
- b1.Property("Counter")
- .HasColumnType("bigint");
-
- b1.Property("DateTime")
- .HasColumnType("timestamp with time zone");
- });
-
- b.HasKey("Id");
-
- b.HasIndex("ProjectId");
-
- b.ToTable("CrdtCommits", (string)null);
- });
-
modelBuilder.Entity("AppAny.Quartz.EntityFrameworkCore.Migrations.QuartzBlobTrigger", b =>
{
b.HasOne("AppAny.Quartz.EntityFrameworkCore.Migrations.QuartzTrigger", "Trigger")
@@ -1078,6 +1078,48 @@ protected override void BuildModel(ModelBuilder modelBuilder)
b.Navigation("JobDetail");
});
+ modelBuilder.Entity("Crdt.Core.ServerCommit", b =>
+ {
+ b.HasOne("LexCore.Entities.FlexProjectMetadata", null)
+ .WithMany()
+ .HasForeignKey("ProjectId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.OwnsMany("Crdt.Core.ChangeEntity", "ChangeEntities", b1 =>
+ {
+ b1.Property("ServerCommitId")
+ .HasColumnType("uuid");
+
+ b1.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ b1.Property("Change")
+ .HasColumnType("text");
+
+ b1.Property("CommitId")
+ .HasColumnType("uuid");
+
+ b1.Property("EntityId")
+ .HasColumnType("uuid");
+
+ b1.Property("Index")
+ .HasColumnType("integer");
+
+ b1.HasKey("ServerCommitId", "Id");
+
+ b1.ToTable("CrdtCommits");
+
+ b1.ToJson("ChangeEntities");
+
+ b1.WithOwner()
+ .HasForeignKey("ServerCommitId");
+ });
+
+ b.Navigation("ChangeEntities");
+ });
+
modelBuilder.Entity("LexCore.Entities.DraftProject", b =>
{
b.HasOne("LexCore.Entities.User", "ProjectManager")
@@ -1176,48 +1218,6 @@ protected override void BuildModel(ModelBuilder modelBuilder)
b.Navigation("Authorization");
});
- modelBuilder.Entity("LexData.Entities.CrdtCommit", b =>
- {
- b.HasOne("LexCore.Entities.FlexProjectMetadata", null)
- .WithMany()
- .HasForeignKey("ProjectId")
- .OnDelete(DeleteBehavior.Cascade)
- .IsRequired();
-
- b.OwnsMany("Crdt.Core.ChangeEntity", "ChangeEntities", b1 =>
- {
- b1.Property("CrdtCommitId")
- .HasColumnType("uuid");
-
- b1.Property("Id")
- .ValueGeneratedOnAdd()
- .HasColumnType("integer");
-
- b1.Property("Change")
- .HasColumnType("text");
-
- b1.Property("CommitId")
- .HasColumnType("uuid");
-
- b1.Property("EntityId")
- .HasColumnType("uuid");
-
- b1.Property("Index")
- .HasColumnType("integer");
-
- b1.HasKey("CrdtCommitId", "Id");
-
- b1.ToTable("CrdtCommits");
-
- b1.ToJson("ChangeEntities");
-
- b1.WithOwner()
- .HasForeignKey("CrdtCommitId");
- });
-
- b.Navigation("ChangeEntities");
- });
-
modelBuilder.Entity("AppAny.Quartz.EntityFrameworkCore.Migrations.QuartzJobDetail", b =>
{
b.Navigation("Triggers");
diff --git a/backend/LfClassicData/DataServiceKernel.cs b/backend/LfClassicData/DataServiceKernel.cs
index 3d67e729e..196e87253 100644
--- a/backend/LfClassicData/DataServiceKernel.cs
+++ b/backend/LfClassicData/DataServiceKernel.cs
@@ -1,9 +1,4 @@
using LfClassicData.Configuration;
-using Microsoft.AspNetCore.Builder;
-using Microsoft.AspNetCore.Http;
-using Microsoft.AspNetCore.Mvc;
-using Microsoft.AspNetCore.Routing;
-using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -48,42 +43,4 @@ public static MongoClientSettings BuildMongoClientSettings(IServiceProvider prov
cb.Subscribe(new DiagnosticsActivityEventSubscriber(new() { CaptureCommandText = true }));
return mongoSettings;
}
-
- public static IEndpointConventionBuilder MapLfClassicApi(this IEndpointRouteBuilder builder)
- {
- var group = builder.MapGroup("/api/lfclassic/{projectCode}");
- group.MapGet("/writingSystems",
- (string projectCode, [FromServices] ILexboxApiProvider provider) =>
- {
- var api = provider.GetProjectApi(projectCode);
- return api.GetWritingSystems();
- });
- group.MapGet("/entries",
- (string projectCode,
- [FromServices] ILexboxApiProvider provider,
- int count = 1000,
- int offset = 0
- ) =>
- {
- var api = provider.GetProjectApi(projectCode);
- return api.GetEntries(new QueryOptions(SortOptions.Default, null, count, offset));
- });
- group.MapGet("/entries/{search}",
- (string projectCode,
- [FromServices] ILexboxApiProvider provider,
- string search,
- int count = 1000,
- int offset = 0) =>
- {
- var api = provider.GetProjectApi(projectCode);
- return api.SearchEntries(search, new QueryOptions(SortOptions.Default, null, count, offset));
- });
- group.MapGet("/entry/{id:Guid}",
- (string projectCode, Guid id, [FromServices] ILexboxApiProvider provider) =>
- {
- var api = provider.GetProjectApi(projectCode);
- return api.GetEntry(id);
- });
- return group;
- }
}
diff --git a/backend/LfClassicData/LfClassicRoutes.cs b/backend/LfClassicData/LfClassicRoutes.cs
new file mode 100644
index 000000000..c52ee01fc
--- /dev/null
+++ b/backend/LfClassicData/LfClassicRoutes.cs
@@ -0,0 +1,81 @@
+using System.Text.Json;
+using Microsoft.AspNetCore.Builder;
+using Microsoft.AspNetCore.Http;
+using Microsoft.AspNetCore.Mvc;
+using Microsoft.AspNetCore.Routing;
+using MiniLcm;
+
+namespace LfClassicData;
+
+public static class LfClassicRoutes
+{
+ public static IEndpointConventionBuilder MapLfClassicApi(this IEndpointRouteBuilder builder)
+ {
+ var group = builder.MapGroup("/api/lfclassic/{projectCode}");
+ group.MapGet("/writingSystems",
+ ([FromRoute] string projectCode, [FromServices] ILexboxApiProvider provider) =>
+ {
+ var api = provider.GetProjectApi(projectCode);
+ return api.GetWritingSystems();
+ });
+ group.MapGet("/entries",
+ ([FromRoute] string projectCode,
+ [FromServices] ILexboxApiProvider provider,
+ [AsParameters] ClassicQueryOptions options
+ ) =>
+ {
+ var api = provider.GetProjectApi(projectCode);
+ return api.GetEntries(options.ToQueryOptions());
+ });
+ group.MapGet("/entries/{search}",
+ ([FromRoute] string projectCode,
+ [FromServices] ILexboxApiProvider provider,
+ [FromRoute] string search,
+ [AsParameters] ClassicQueryOptions options) =>
+ {
+ var api = provider.GetProjectApi(projectCode);
+ return api.SearchEntries(search, options.ToQueryOptions());
+ });
+ group.MapGet("/entry/{id:Guid}",
+ ([FromRoute] string projectCode, Guid id, [FromServices] ILexboxApiProvider provider) =>
+ {
+ var api = provider.GetProjectApi(projectCode);
+ return api.GetEntry(id);
+ });
+ return group;
+ }
+
+ private class ClassicQueryOptions
+ {
+ public QueryOptions ToQueryOptions()
+ {
+ ExemplarOptions? exemplarOptions = string.IsNullOrEmpty(ExemplarValue) || ExemplarWritingSystem is null
+ ? null
+ : new (ExemplarValue, ExemplarWritingSystem.Value);
+ var sortField = Enum.TryParse<SortField>(SortField, true, out var field) ? field : SortOptions.Default.Field;
+ return new QueryOptions(new SortOptions(sortField,
+ SortWritingSystem ?? SortOptions.Default.WritingSystem,
+ Ascending ?? SortOptions.Default.Ascending),
+ exemplarOptions,
+ Count ?? QueryOptions.Default.Count,
+ Offset ?? QueryOptions.Default.Offset);
+ }
+
+ public string? SortField { get; set; }
+
+ public WritingSystemId? SortWritingSystem { get; set; }
+
+ [FromQuery]
+ public bool? Ascending { get; set; }
+
+ [FromQuery]
+ public string? ExemplarValue { get; set; }
+ public WritingSystemId? ExemplarWritingSystem { get; set; }
+
+ [FromQuery]
+ public int? Count { get; set; }
+
+ [FromQuery]
+ public int? Offset { get; set; }
+ }
+}
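Note (not part of the diff): with [AsParameters], each ClassicQueryOptions property binds to a query-string parameter of the same name, and anything omitted falls back to QueryOptions.Default / SortOptions.Default. A hypothetical request against the new endpoint (host and project code are made up, and SortField is left out because its enum member names are not shown in this diff):

    // Illustration only; assumes a reachable LexBox host and a readable project.
    using var http = new HttpClient { BaseAddress = new Uri("https://lexbox.example.org") };
    var json = await http.GetStringAsync(
        "/api/lfclassic/sena-3/entries?SortWritingSystem=en&Ascending=true&Count=100&Offset=0");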
diff --git a/backend/Testing/LexCore/CrdtCommitTests.cs b/backend/Testing/LexCore/CrdtServerCommitTests.cs
similarity index 75%
rename from backend/Testing/LexCore/CrdtCommitTests.cs
rename to backend/Testing/LexCore/CrdtServerCommitTests.cs
index 225d59f95..d15269ccd 100644
--- a/backend/Testing/LexCore/CrdtCommitTests.cs
+++ b/backend/Testing/LexCore/CrdtServerCommitTests.cs
@@ -10,29 +10,29 @@
namespace Testing.LexCore;
[Collection(nameof(TestingServicesFixture))]
-public class CrdtCommitTests
+public class CrdtServerCommitTests
{
private readonly LexBoxDbContext _dbContext;
- public CrdtCommitTests(TestingServicesFixture testing)
+ public CrdtServerCommitTests(TestingServicesFixture testing)
{
var serviceProvider = testing.ConfigureServices();
_dbContext = serviceProvider.GetRequiredService<LexBoxDbContext>();
}
[Fact]
- public async Task CanSaveCrdtCommit()
+ public async Task CanSaveServerCommit()
{
var projectId = await _dbContext.Projects.Select(p => p.Id).FirstAsync();
var commitId = Guid.NewGuid();
- _dbContext.Add(new CrdtCommit(commitId)
+ _dbContext.Add(new ServerCommit(commitId)
{
ClientId = Guid.NewGuid(),
- HybridDateTime = HybridDateTime.ForTestingNow,
+ HybridDateTime = new HybridDateTime(DateTimeOffset.UtcNow, 0),
ProjectId = projectId,
ChangeEntities =
[
- new ChangeEntity<JsonChange>()
+ new ChangeEntity<ServerJsonChange>()
{
Index = 0,
CommitId = commitId,
@@ -51,14 +51,14 @@ public async Task CanRoundTripCommitChanges()
var projectId = await _dbContext.Projects.Select(p => p.Id).FirstAsync();
var commitId = Guid.NewGuid();
var changeJson = """{"$type":"test","name":"Joe"}""";
- var expectedCommit = new CrdtCommit(commitId)
+ var expectedCommit = new ServerCommit(commitId)
{
ClientId = Guid.NewGuid(),
- HybridDateTime = HybridDateTime.ForTestingNow,
+ HybridDateTime = new HybridDateTime(DateTimeOffset.UtcNow, 0),
ProjectId = projectId,
ChangeEntities =
[
- new ChangeEntity<JsonChange>()
+ new ChangeEntity<ServerJsonChange>()
{
Index = 0,
CommitId = commitId,
@@ -70,7 +70,7 @@ public async Task CanRoundTripCommitChanges()
_dbContext.Add(expectedCommit);
await _dbContext.SaveChangesAsync();
- var actualCommit = await _dbContext.Set<CrdtCommit>().AsNoTracking().FirstAsync(c => c.Id == commitId);
+ var actualCommit = await _dbContext.Set<ServerCommit>().AsNoTracking().FirstAsync(c => c.Id == commitId);
actualCommit.ShouldNotBeSameAs(expectedCommit);
JsonSerializer.Serialize(actualCommit.ChangeEntities[0].Change).ShouldBe(changeJson);
}
@@ -79,7 +79,7 @@ public async Task CanRoundTripCommitChanges()
public void TypePropertyShouldAlwaysBeFirst()
{
var changeJson = """{"name":"Joe","$type":"test"}""";
- var jsonChange = JsonSerializer.Deserialize<JsonChange>(changeJson);
+ var jsonChange = JsonSerializer.Deserialize<ServerJsonChange>(changeJson);
JsonSerializer.Serialize(jsonChange).ShouldBe("""{"$type":"test","name":"Joe"}""");
}
}
diff --git a/backend/Testing/LexCore/ProjectCodeTests.cs b/backend/Testing/LexCore/ProjectCodeTests.cs
new file mode 100644
index 000000000..49b387bfa
--- /dev/null
+++ b/backend/Testing/LexCore/ProjectCodeTests.cs
@@ -0,0 +1,35 @@
+using LexCore.Entities;
+using Shouldly;
+
+namespace Testing.LexCore;
+
+public class ProjectCodeTests
+{
+ [Theory]
+ [InlineData("_____deleted_____")]
+ [InlineData("_____temp_____")]
+ [InlineData("api")]
+ [InlineData("../hacker")]
+ [InlineData("hacker/test")]
+ [InlineData("/hacker")]
+ [InlineData(@"hacker\test")]
+ [InlineData("❌")]
+ [InlineData("!")]
+ [InlineData("#")]
+ [InlineData("-not-start-with-dash")]
+ public void InvalidCodesThrows(string code)
+ {
+ Assert.Throws<ArgumentException>(() => new ProjectCode(code));
+ }
+
+ [Theory]
+ [InlineData("test-name123")]
+ [InlineData("123-name")]
+ [InlineData("test")]
+ public void ValidCodes(string code)
+ {
+ var projectCode = new ProjectCode(code);
+ projectCode.Value.ShouldBe(code);
+ projectCode.ToString().ShouldBe(code);
+ }
+}
diff --git a/backend/Testing/LexCore/Services/HgServiceTests.cs b/backend/Testing/LexCore/Services/HgServiceTests.cs
index c1bb0c6fa..502b0a3b1 100644
--- a/backend/Testing/LexCore/Services/HgServiceTests.cs
+++ b/backend/Testing/LexCore/Services/HgServiceTests.cs
@@ -7,6 +7,7 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
+using Moq.Contrib.HttpClient;
using Shouldly;
using Testing.Fixtures;
@@ -30,8 +31,16 @@ public HgServiceTests()
HgResumableUrl = LexboxResumable,
SendReceiveDomain = LexboxHgWeb
};
+ var handler = new Mock<HttpMessageHandler>(MockBehavior.Strict);
+
+ // This may need to become more sophisticated if our FinishReset tests are changed to include
+ // a Mercurial repo with actual commits in it, but this is good enough at the moment.
+ var AllZeroHash = "0000000000000000000000000000000000000000";
+ handler.SetupAnyRequest().ReturnsResponse(AllZeroHash);
+
+ var mockFactory = handler.CreateClientFactory();
_hgService = new HgService(new OptionsWrapper<HgConfig>(_hgConfig),
- Mock.Of<IHttpClientFactory>(),
+ mockFactory,
NullLogger.Instance);
CleanUpTempDir();
}
diff --git a/backend/Testing/Services/Utils.cs b/backend/Testing/Services/Utils.cs
index 76cf059cd..dd8a6559d 100644
--- a/backend/Testing/Services/Utils.cs
+++ b/backend/Testing/Services/Utils.cs
@@ -84,11 +84,6 @@ public static async Task WaitForHgRefreshIntervalAsync()
await Task.Delay(TestingEnvironmentVariables.HgRefreshInterval);
}
- public static async Task WaitForLexboxMetadataUpdateAsync()
- {
- await Task.Delay(3000);
- }
-
private static string GetNewProjectDir(string projectCode,
[CallerMemberName] string projectName = "")
{
diff --git a/backend/Testing/SyncReverseProxy/SendReceiveServiceTests.cs b/backend/Testing/SyncReverseProxy/SendReceiveServiceTests.cs
index 357aa4f3e..baa0bd71e 100644
--- a/backend/Testing/SyncReverseProxy/SendReceiveServiceTests.cs
+++ b/backend/Testing/SyncReverseProxy/SendReceiveServiceTests.cs
@@ -81,14 +81,10 @@ public async Task ModifyProjectData(HgProtocol protocol)
var projectConfig = _srFixture.InitLocalFlexProjectWithRepo();
await using var project = await RegisterProjectInLexBox(projectConfig, _adminApiTester);
- await WaitForHgRefreshIntervalAsync();
-
// Push the project to the server
var sendReceiveParams = new SendReceiveParams(protocol, projectConfig);
_sendReceiveService.SendReceiveProject(sendReceiveParams, AdminAuth);
- await WaitForLexboxMetadataUpdateAsync();
-
// Verify pushed and store last commit
var lastCommitDate = await _adminApiTester.GetProjectLastCommit(projectConfig.Code);
lastCommitDate.ShouldNotBeNullOrEmpty();
@@ -101,8 +97,6 @@ public async Task ModifyProjectData(HgProtocol protocol)
// Push changes
_sendReceiveService.SendReceiveProject(sendReceiveParams, AdminAuth, "Modify project data automated test");
- await WaitForLexboxMetadataUpdateAsync();
-
// Verify the push updated the last commit date
var lastCommitDateAfter = await _adminApiTester.GetProjectLastCommit(projectConfig.Code);
lastCommitDateAfter.ShouldBeGreaterThan(lastCommitDate);
@@ -117,8 +111,6 @@ public async Task SendReceiveAfterProjectReset(HgProtocol protocol)
var projectConfig = _srFixture.InitLocalFlexProjectWithRepo(protocol, "SR_AfterReset");
await using var project = await RegisterProjectInLexBox(projectConfig, _adminApiTester);
- await WaitForHgRefreshIntervalAsync(); // TODO 765: Remove this
-
var sendReceiveParams = new SendReceiveParams(protocol, projectConfig);
var srResult = _sendReceiveService.SendReceiveProject(sendReceiveParams, AdminAuth);
@@ -144,8 +136,6 @@ public async Task SendReceiveAfterProjectReset(HgProtocol protocol)
await _adminApiTester.HttpClient.PostAsync($"{_adminApiTester.BaseUrl}/api/project/resetProject/{projectConfig.Code}", null);
await _adminApiTester.HttpClient.PostAsync($"{_adminApiTester.BaseUrl}/api/project/finishResetProject/{projectConfig.Code}", null);
- await WaitForHgRefreshIntervalAsync(); // TODO 765: Remove this
-
// Step 2: verify project is now empty, i.e. tip is "0000000..."
response = await _adminApiTester.HttpClient.GetAsync(tipUri.Uri);
jsonResult = await response.Content.ReadFromJsonAsync();
@@ -169,8 +159,6 @@ public async Task SendReceiveAfterProjectReset(HgProtocol protocol)
var srResultStep3 = _sendReceiveService.SendReceiveProject(sendReceiveParams, AdminAuth);
_output.WriteLine(srResultStep3);
- await WaitForHgRefreshIntervalAsync(); // TODO 765: Remove this
-
// Step 4: verify project tip is same hash as original project tip
response = await _adminApiTester.HttpClient.GetAsync(tipUri.Uri);
jsonResult = await response.Content.ReadFromJsonAsync();
diff --git a/backend/Testing/Testing.csproj b/backend/Testing/Testing.csproj
index d9a754155..9f3bbc134 100644
--- a/backend/Testing/Testing.csproj
+++ b/backend/Testing/Testing.csproj
@@ -15,7 +15,8 @@
-
+
+
diff --git a/backend/harmony b/backend/harmony
index ce8badb85..26d825dd8 160000
--- a/backend/harmony
+++ b/backend/harmony
@@ -1 +1 @@
-Subproject commit ce8badb853756e350c91994e4d967bc46fee8a80
+Subproject commit 26d825dd8509a4a0793f71cc17ff367327599363
diff --git a/frontend/Taskfile.yml b/frontend/Taskfile.yml
index 53a59f524..ece4c16b6 100644
--- a/frontend/Taskfile.yml
+++ b/frontend/Taskfile.yml
@@ -24,7 +24,7 @@ tasks:
aliases: [ b ]
deps: [ install ]
cmds:
- - pnpm run build
+ - pnpm run -r build
check:
desc: "Runs the frontend code checks done in CI. Note: the app must be built."
aliases: [ sc, svelte-check ]
diff --git a/frontend/src/lib/gql/gql-client.ts b/frontend/src/lib/gql/gql-client.ts
index 124743f60..3ba9ee085 100644
--- a/frontend/src/lib/gql/gql-client.ts
+++ b/frontend/src/lib/gql/gql-client.ts
@@ -26,6 +26,7 @@ import {
LexGqlError,
type SoftDeleteProjectMutationVariables,
type BulkAddProjectMembersMutationVariables,
+ type DeleteDraftProjectMutationVariables,
} from './types';
import type {Readable, Unsubscriber} from 'svelte/store';
import {derived} from 'svelte/store';
@@ -53,7 +54,9 @@ function createGqlClient(_gqlEndpoint?: string): Client {
Mutation: {
softDeleteProject: (result, args: SoftDeleteProjectMutationVariables, cache, _info) => {
cache.invalidate({__typename: 'Project', id: args.input.projectId});
- cache.invalidate({__typename: 'DraftProject', id: args.input.projectId});
+ },
+ deleteDraftProject: (result, args: DeleteDraftProjectMutationVariables, cache, _info) => {
+ cache.invalidate({__typename: 'DraftProject', id: args.input.draftProjectId});
},
deleteUserByAdminOrSelf: (result, args: DeleteUserByAdminOrSelfMutationVariables, cache, _info) => {
cache.invalidate({__typename: 'User', id: args.input.userId});
diff --git a/frontend/src/routes/(authenticated)/project/[project_code]/viewer/lfClassicLexboxApi.ts b/frontend/src/routes/(authenticated)/project/[project_code]/viewer/lfClassicLexboxApi.ts
index f1f911e23..64dc0c4ca 100644
--- a/frontend/src/routes/(authenticated)/project/[project_code]/viewer/lfClassicLexboxApi.ts
+++ b/frontend/src/routes/(authenticated)/project/[project_code]/viewer/lfClassicLexboxApi.ts
@@ -19,19 +19,42 @@ export class LfClassicLexboxApi implements LexboxApi {
}
async GetEntries(_options: QueryOptions | undefined): Promise<IEntry[]> {
- const result = await fetch(`/api/lfclassic/${this.projectCode}/entries?order=desc&count=100`);
+ //todo pass query options into query
+ const result = await fetch(`/api/lfclassic/${this.projectCode}/entries${this.toQueryParams(_options)}`);
return (await result.json()) as IEntry[];
}
- CreateWritingSystem(_type: WritingSystemType, _writingSystem: WritingSystem): Promise {
- throw new Error('Method not implemented.');
+ async SearchEntries(_query: string, _options: QueryOptions | undefined): Promise<IEntry[]> {
+ //todo pass query options into query
+ const result = await fetch(`/api/lfclassic/${this.projectCode}/entries/${encodeURIComponent(_query)}${this.toQueryParams(_options)}`);
+ return (await result.json()) as IEntry[];
}
- UpdateWritingSystem(_wsId: string, _type: WritingSystemType, _update: JsonPatch): Promise {
+ private toQueryParams(options: QueryOptions | undefined): string {
+
+ if (!options) return '';
+ /* eslint-disable @typescript-eslint/no-unsafe-assignment */
+ const asc = options.order.ascending ?? true;
+ const params = new URLSearchParams({
+ SortField: options.order.field,
+ SortWritingSystem: options.order.writingSystem,
+ Ascending: asc ? 'true' : 'false',
+ Count: options.count.toString(),
+ Offset: options.offset.toString()
+ });
+ if (options.exemplar) {
+ params.set('ExemplarValue', options.exemplar.value);
+ params.set('ExemplarWritingSystem', options.exemplar.writingSystem);
+ }
+ /* eslint-enable @typescript-eslint/no-unsafe-assignment */
+ return '?' + params.toString();
+ }
+
+ CreateWritingSystem(_type: WritingSystemType, _writingSystem: WritingSystem): Promise {
throw new Error('Method not implemented.');
}
- SearchEntries(_query: string, _options: QueryOptions | undefined): Promise<IEntry[]> {
+ UpdateWritingSystem(_wsId: string, _type: WritingSystemType, _update: JsonPatch): Promise {
throw new Error('Method not implemented.');
}
diff --git a/frontend/tests/resetProject.test.ts b/frontend/tests/resetProject.test.ts
index f809179cd..359909e7a 100644
--- a/frontend/tests/resetProject.test.ts
+++ b/frontend/tests/resetProject.test.ts
@@ -37,20 +37,14 @@ test('reset project and upload .zip file', async ({ page, tempProject, tempDir }
await resetProjectModel.assertGone();
// Step 2: Get tip hash and file list from hgweb, check some known values
- // It can take a while for the server to pick up the new repo
- let beforeResetJson: HgWebJson;
- await expect(async () => {
- const beforeResetResponse = await page.request.get(`${testEnv.serverBaseUrl}/hg/${tempProject.code}/file/tip?style=json-lex`);
- beforeResetJson = await beforeResetResponse.json() as HgWebJson;
- expect(beforeResetJson).toHaveProperty('node');
- expect(beforeResetJson.node).not.toEqual(allZeroHash);
- expect(beforeResetJson).toHaveProperty('files');
- expect(beforeResetJson.files).toHaveLength(1);
- expect(beforeResetJson.files[0]).toHaveProperty('basename');
- expect(beforeResetJson.files[0].basename).toBe('hello.txt');
- }).toPass({
- intervals: [1_000, 3_000, 5_000],
- });
+ const beforeResetResponse = await page.request.get(`${testEnv.serverBaseUrl}/hg/${tempProject.code}/file/tip?style=json-lex`);
+ const beforeResetJson = await beforeResetResponse.json() as HgWebJson;
+ expect(beforeResetJson).toHaveProperty('node');
+ expect(beforeResetJson.node).not.toEqual(allZeroHash);
+ expect(beforeResetJson).toHaveProperty('files');
+ expect(beforeResetJson.files).toHaveLength(1);
+ expect(beforeResetJson.files[0]).toHaveProperty('basename');
+ expect(beforeResetJson.files[0].basename).toBe('hello.txt');
// Step 3: reset project, do not upload zip file
await projectPage.goto();
@@ -65,16 +59,11 @@ test('reset project and upload .zip file', async ({ page, tempProject, tempDir }
await resetProjectModel.assertGone();
// Step 4: confirm it's empty now
- // It can take a while for the server to pick up the new repo
- await expect(async () => {
- const afterResetResponse = await page.request.get(`${testEnv.serverBaseUrl}/hg/${tempProject.code}/file/tip?style=json-lex`);
- const afterResetJson = await afterResetResponse.json() as HgWebJson;
- expect(afterResetJson.node).toEqual(allZeroHash);
- expect(afterResetJson).toHaveProperty('files');
- expect(afterResetJson.files).toHaveLength(0);
- }).toPass({
- intervals: [1_000, 3_000, 5_000],
- });
+ const afterResetResponse = await page.request.get(`${testEnv.serverBaseUrl}/hg/${tempProject.code}/file/tip?style=json-lex`);
+ const afterResetJson = await afterResetResponse.json() as HgWebJson;
+ expect(afterResetJson.node).toEqual(allZeroHash);
+ expect(afterResetJson).toHaveProperty('files');
+ expect(afterResetJson.files).toHaveLength(0);
// Step 5: reset project again, uploading zip file downloaded from step 1
await projectPage.goto();
@@ -88,12 +77,7 @@ test('reset project and upload .zip file', async ({ page, tempProject, tempDir }
await resetProjectModel.assertGone();
// Step 6: confirm tip hash and contents are same as before reset
- // It can take a while for the server to pick up the new repo
- await expect(async () => {
- const afterUploadResponse = await page.request.get(`${testEnv.serverBaseUrl}/hg/${tempProject.code}/file/tip?style=json-lex`);
- const afterResetJSon = await afterUploadResponse.json() as HgWebJson;
- expect(afterResetJSon).toEqual(beforeResetJson); // NOT .toBe(), which would check that they're the same object.
- }).toPass({
- intervals: [1_000, 3_000, 5_000],
- });
+ const afterUploadResponse = await page.request.get(`${testEnv.serverBaseUrl}/hg/${tempProject.code}/file/tip?style=json-lex`);
+ const afterResetJSon = await afterUploadResponse.json() as HgWebJson;
+ expect(afterResetJSon).toEqual(beforeResetJson); // NOT .toBe(), which would check that they're the same object.
});
diff --git a/hgweb/command-runner.sh b/hgweb/command-runner.sh
index bc511e19e..d03074d54 100644
--- a/hgweb/command-runner.sh
+++ b/hgweb/command-runner.sh
@@ -1,7 +1,7 @@
#!/bin/bash
# Define the list of allowed commands
-allowed_commands=("verify" "tip" "wesaylexentrycount" "lexentrycount" "recover" "healthz")
+allowed_commands=("verify" "tip" "wesaylexentrycount" "lexentrycount" "recover" "healthz" "invalidatedircache")
# Get the project code and command name from the URL
IFS='/' read -ra PATH_SEGMENTS <<< "$PATH_INFO"
@@ -44,6 +44,10 @@ echo ""
# Run the hg command, simply output to stdout
first_char=$(echo $project_code | cut -c1)
+# Ensure NFS cache is refreshed in case project repo changed in another pod (e.g., project reset)
+ls /var/hg/repos/$first_char/$project_code/.hg >/dev/null 2>/dev/null # Don't need output; this is enough to refresh NFS dir cache
+# Sometimes invalidatedircache is called after deleting a project, so the cd would fail. So exit fast in that case.
+[ "x$command_name" = "xinvalidatedircache" ] && exit 0
cd /var/hg/repos/$first_char/$project_code
case $command_name in