From ff3d57779d6f035b7cda4ea65aad833505a96907 Mon Sep 17 00:00:00 2001
From: Anna
Date: Wed, 4 Sep 2024 09:15:34 -0400
Subject: [PATCH] fix: check to see if mutex is needed

---
 DownloadTask.cs | 67 ++++++++++++++++++++++++++++++++++++-------------
 1 file changed, 49 insertions(+), 18 deletions(-)

diff --git a/DownloadTask.cs b/DownloadTask.cs
index b37b25f..403eba5 100644
--- a/DownloadTask.cs
+++ b/DownloadTask.cs
@@ -59,6 +59,7 @@ internal class DownloadTask : IDisposable {
     private Util.SentryTransaction? Transaction { get; set; }
     private bool SupportsHardLinks { get; set; }
     private SemaphoreSlim DuplicateMutex { get; } = new(1, 1);
+    private bool RequiresDuplicateMutex { get; set; }
 
     private HashSet<string> ExistingHashes { get; } = [];
 
@@ -365,6 +366,23 @@ private void DetermineIfUpdate(IDownloadTask_GetVersion info) {
         }
     }
 
+    // Decide up front whether DuplicateFile needs mutual exclusion: it only
+    // does when the same output path is claimed by more than one hash.
+    private void CheckOutputPaths(IDownloadTask_GetVersion info) {
+        var neededFiles = info.NeededFiles.Files.Files;
+
+        var outputToHash = new Dictionary<string, string>();
+        foreach (var (hash, file) in neededFiles) {
+            foreach (var outputPath in GetOutputPaths(file)) {
+                if (outputToHash.TryGetValue(outputPath, out var stored) && stored != hash) {
+                    Plugin.Log.Warning($"V:{this.VersionId.ToCrockford()} has the same output path pointing to multiple paths, will use slow duplication");
+                    this.RequiresDuplicateMutex = true;
+                    return;
+                }
+
+                outputToHash[outputPath] = hash;
+            }
+        }
+    }
+
     private async Task HashExistingFiles() {
         this.State = State.CheckingExistingFiles;
         this.SetStateData(0, 0);
@@ -403,15 +421,22 @@
         Action<string, string> action = this.SupportsHardLinks ?
             FileHelper.CreateHardLink : File.Move;
 
-        foreach (var (path, hash) in hashes) {
-            // move/link each path to the hashes path
-            Plugin.Resilience.Execute(() => action(
-                path,
-                Path.Join(this.HashesPath, hash)
-            ));
-
-            this.ExistingHashes.Add(hash);
-        }
+        Parallel.ForEach(
+            hashes,
+            entry => {
+                var (path, hash) = entry;
+                // move/link each path to the hashes path
+                Plugin.Resilience.Execute(() => action(
+                    path,
+                    Path.Join(this.HashesPath, hash)
+                ));
+
+                // HashSet<T> is not thread-safe for concurrent writes;
+                // serialise adds now that this loop runs in parallel
+                lock (this.ExistingHashes) {
+                    this.ExistingHashes.Add(hash);
+                }
+            }
+        );
     }
 
     private async Task DownloadFiles(IDownloadTask_GetVersion info) {
@@ -765,7 +790,9 @@
     }
 
     private async Task DuplicateFile(string filesDir, IEnumerable<string> outputPaths, string path) {
-        using var guard = await SemaphoreGuard.WaitAsync(this.DuplicateMutex, this.CancellationToken.Token);
+        using var guard = this.RequiresDuplicateMutex
+            ? await SemaphoreGuard.WaitAsync(this.DuplicateMutex, this.CancellationToken.Token)
+            : null;
 
         if (!this.SupportsHardLinks) {
             // If hard links aren't supported, copy the path to the first output
@@ -846,14 +873,18 @@
         this.SetStateData(0, total);
 
         var done = 0u;
-        foreach (var extra in presentFiles) {
-            var extraPath = Path.Join(this.FilesPath, extra);
-            Plugin.Log.Info($"removing extra file {extraPath}");
-            Plugin.Resilience.Execute(() => FileHelper.Delete(extraPath));
-
-            done += 1;
-            this.SetStateData(done, total);
-        }
+        Parallel.ForEach(
+            presentFiles,
+            extra => {
+                var extraPath = Path.Join(this.FilesPath, extra);
+                Plugin.Log.Info($"removing extra file {extraPath}");
+                Plugin.Resilience.Execute(() => FileHelper.Delete(extraPath));
+
+                // uint increment is not atomic; use Interlocked for the
+                // counter shared across parallel iterations
+                var finished = Interlocked.Increment(ref done);
+                this.SetStateData(finished, total);
+            }
+        );
 
         // remove any empty directories
         DirectoryHelper.RemoveEmptyDirectories(this.FilesPath!);