
Commit

fix: make ui duplication more dumb
anna-is-cute committed Aug 23, 2024
1 parent 9f7cab9 commit dc807b7
Showing 1 changed file with 30 additions and 37 deletions.
67 changes: 30 additions & 37 deletions DownloadTask.cs
@@ -1493,7 +1493,7 @@ private async Task DuplicateUiFiles(DefaultMod defaultMod, List<ModGroup> modGroups
         var filesPath = Path.Join(this.PenumbraModPath, "files");
 
         // first record unique references
-        var references = new Dictionary<string, Dictionary<string, (uint, List<Action<string>>)>>();
+        var references = new Dictionary<string, (uint, List<Action<string>>)>();
         UpdateReferences(defaultMod.Files);
         foreach (var group in modGroups) {
             if (group is not StandardModGroup standard) {
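For context, each value in the new flattened map is a tuple: a reference count plus the callbacks that will repoint each referencing option at its own copy once duplication has happened. A minimal stand-alone sketch of that shape, with illustrative names (Count, UpdatePaths, and Record are not from the plugin, which uses an unnamed tuple inside its UpdateReferences helper):

using System;
using System.Collections.Generic;

class ReferenceMapSketch {
    // key: normalised path of a file under "files/"; value: how many options
    // reference it, plus one callback per reference that rewrites that option's
    // path once the file has been duplicated
    private readonly Dictionary<string, (uint Count, List<Action<string>> UpdatePaths)> references = new();

    public void Record(string outputPath, Action<string> updatePath) {
        var normalised = outputPath.ToLowerInvariant();
        if (!this.references.TryGetValue(normalised, out var refs)) {
            refs = (0, new List<Action<string>>());
        }

        refs.UpdatePaths.Add(updatePath);
        refs.Count += 1;
        this.references[normalised] = refs;
    }
}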
@@ -1506,42 +1506,40 @@ private async Task DuplicateUiFiles(DefaultMod defaultMod, List<ModGroup> modGroups
         }
 
         // then find any uniquely referenced more than once
-        foreach (var outputPathCounts in references.Values) {
-            foreach (var (joinedOutputPath, (refs, updatePathActions)) in outputPathCounts) {
-                var outputPath = joinedOutputPath[6..];
-                if (refs < 2) {
-                    continue;
-                }
+        foreach (var (joinedOutputPath, (refs, updatePathActions)) in references) {
+            var outputPath = joinedOutputPath[6..];
+            if (refs < 2) {
+                continue;
+            }
 
-                // At this point, we have identified a game path and a path on
-                // disk that is referenced more than once by differing options.
-                // This path needs to be duplicated with a different file name
-                // to avoid crashes. This process can be done using hard links
-                // if they're supported; otherwise copy the file.
+            // At this point, we have identified a path on disk that is
+            // referenced more than once by differing options. This path needs
+            // to be duplicated with a different file name to avoid crashes.
+            // This process can be done using hard links if they're supported;
+            // otherwise copy the file.
 
-                Action<string, string> duplicateMethod = this.SupportsHardLinks
-                    ? FileHelper.CreateHardLink
-                    : File.Copy;
+            Action<string, string> duplicateMethod = this.SupportsHardLinks
+                ? FileHelper.CreateHardLink
+                : File.Copy;
 
-                var src = Path.Join(filesPath, outputPath);
-                for (var i = 0; i < refs; i++) {
-                    var ext = $".{i + 1}" + Path.GetExtension(outputPath);
-                    var newRelative = Path.ChangeExtension(outputPath, ext);
-                    var dst = Path.Join(filesPath, newRelative);
+            var src = Path.Join(filesPath, outputPath);
+            for (var i = 0; i < refs; i++) {
+                var ext = $".{i + 1}" + Path.GetExtension(outputPath);
+                var newRelative = Path.ChangeExtension(outputPath, ext);
+                var dst = Path.Join(filesPath, newRelative);
 
-                    FileHelper.DeleteIfExists(dst);
+                FileHelper.DeleteIfExists(dst);
 
-                    Plugin.Resilience.Execute(() => duplicateMethod(src, dst));
+                Plugin.Resilience.Execute(() => duplicateMethod(src, dst));
 
-                    // update the path
-                    updatePathActions[i](Path.Join("files", newRelative));
-                    this.ExpectedFiles.Add(newRelative.ToLowerInvariant());
-                }
+                // update the path
+                updatePathActions[i](Path.Join("files", newRelative));
+                this.ExpectedFiles.Add(newRelative.ToLowerInvariant());
+            }
 
-                // remove the original file
-                Plugin.Resilience.Execute(() => File.Delete(src));
-                this.ExpectedFiles.Remove(outputPath);
-            }
+            // remove the original file
+            Plugin.Resilience.Execute(() => File.Delete(src));
+            this.ExpectedFiles.Remove(outputPath);
         }
 
         await this.SaveDefaultMod(defaultMod);
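The loop above derives each duplicate's name by inserting the 1-based reference index before the extension, so a file referenced twice, say ui/uld/target.tex, becomes ui/uld/target.1.tex and ui/uld/target.2.tex, and the original is then deleted. A rough self-contained sketch of that step, assuming a plain copy in place of the plugin's FileHelper.CreateHardLink / SupportsHardLinks fast path (method and parameter names here are illustrative):

using System;
using System.IO;

static class DuplicationSketch {
    // duplicate src once per referencing option as name.1.ext, name.2.ext, ...
    public static void DuplicateForEachReference(string filesPath, string outputPath, uint refs) {
        var src = Path.Join(filesPath, outputPath);
        for (var i = 0; i < refs; i++) {
            // "ui/uld/target.tex" -> "ui/uld/target.1.tex", "ui/uld/target.2.tex", ...
            var ext = $".{i + 1}" + Path.GetExtension(outputPath);
            var newRelative = Path.ChangeExtension(outputPath, ext);
            var dst = Path.Join(filesPath, newRelative);

            if (File.Exists(dst)) {
                File.Delete(dst);
            }

            // the plugin hard-links here when the filesystem supports it;
            // a copy is the portable fallback
            File.Copy(src, dst);
        }

        // every option now points at its own numbered copy, so the original
        // can be removed
        File.Delete(src);
    }
}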
@@ -1562,12 +1560,7 @@ void UpdateReferences(Dictionary<string, string> files) {
                 // normalise case of output path
                 var normalised = outputPath.ToLowerInvariant();
 
-                if (!references.TryGetValue(gamePath, out var outputPathCounts)) {
-                    outputPathCounts = [];
-                    references[gamePath] = outputPathCounts;
-                }
-
-                if (!outputPathCounts.TryGetValue(normalised, out var refs)) {
+                if (!references.TryGetValue(normalised, out var refs)) {
                     refs = (0, []);
                 }
 
@@ -1576,7 +1569,7 @@ void UpdateReferences(Dictionary<string, string> files) {
                 });
                 refs.Item1 += 1;
 
-                outputPathCounts[normalised] = refs;
+                references[normalised] = refs;
             }
         }
     }
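Since UpdateReferences now keys the map by the lower-cased output path alone, two options that ship the same file on disk, even with different casing, land on the same entry, and a count of 2 or more is what triggers the duplication above. A hypothetical usage of the ReferenceMapSketch from earlier:

var map = new ReferenceMapSketch();
// two different options happen to ship the same file on disk
map.Record("files/ui/uld/target.tex", p => Console.WriteLine($"option A now uses {p}"));
map.Record("Files/UI/ULD/Target.TEX", p => Console.WriteLine($"option B now uses {p}"));
// both calls hit the key "files/ui/uld/target.tex"; its count is now 2, so the
// file will be duplicated once per referencing option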
