diff --git a/Editors/AnimationReTarget/Test.AnimatioReTarget/Test.AnimatioReTarget.csproj b/Editors/AnimationReTarget/Test.AnimatioReTarget/Test.AnimatioReTarget.csproj
index 98a3b72e6..f392b9dd0 100644
--- a/Editors/AnimationReTarget/Test.AnimatioReTarget/Test.AnimatioReTarget.csproj
+++ b/Editors/AnimationReTarget/Test.AnimatioReTarget/Test.AnimatioReTarget.csproj
@@ -20,7 +20,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Examples/DbSchemaBuilder.cs b/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Examples/DbSchemaBuilder.cs
index 165d38db2..e22d39ec0 100644
--- a/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Examples/DbSchemaBuilder.cs
+++ b/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Examples/DbSchemaBuilder.cs
@@ -1,7 +1,5 @@
using System.Data;
-using System.Data.SqlClient;
using System.Data.SQLite;
-using System.Linq;
using Editors.DatabaseEditor.FileFormats;
using Shared.Core.ByteParsing;
using Shared.Core.PackFiles;
@@ -252,9 +250,9 @@ public void PopulateTable(IPackFileService packFileService, SQLiteConnection sql
{
try
{
- using (var copy = new SqlBulkCopy(sqlConnection.ConnectionString))
- {
-
+ //using (var copy = new SqlBulkCopy(sqlConnection.ConnectionString))
+ //{
+
//var _ravi = dt.NewRow();
//_ravi["Name"] = "ravi";
@@ -268,7 +266,7 @@ public void PopulateTable(IPackFileService packFileService, SQLiteConnection sql
// copy.WriteToServer(dt);
- }
+ //}
Console.WriteLine($"{tableSchema.Name} - {parsedTables}/{tableSchemas.Count}");
diff --git a/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Utility.DatabaseSchemaGenerator.csproj b/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Utility.DatabaseSchemaGenerator.csproj
index 513a6d729..6bbb0481b 100644
--- a/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Utility.DatabaseSchemaGenerator.csproj
+++ b/Editors/DatabaseEditor/Utility.DatabaseSchemaGenerator/Utility.DatabaseSchemaGenerator.csproj
@@ -8,9 +8,9 @@
-
-
-
+
+
+
all
runtime; build; native; contentfiles; analyzers; buildtransitive
diff --git a/Editors/ImportExportEditor/Editors.ImportExport/Editors.ImportExport.csproj b/Editors/ImportExportEditor/Editors.ImportExport/Editors.ImportExport.csproj
index 0800c9d66..518dfa331 100644
--- a/Editors/ImportExportEditor/Editors.ImportExport/Editors.ImportExport.csproj
+++ b/Editors/ImportExportEditor/Editors.ImportExport/Editors.ImportExport.csproj
@@ -18,8 +18,8 @@
-
-
+
+
diff --git a/Editors/ImportExportEditor/Test.ImportExport/Test.ImportExport.csproj b/Editors/ImportExportEditor/Test.ImportExport/Test.ImportExport.csproj
index 9a970e5c9..0edebb3df 100644
--- a/Editors/ImportExportEditor/Test.ImportExport/Test.ImportExport.csproj
+++ b/Editors/ImportExportEditor/Test.ImportExport/Test.ImportExport.csproj
@@ -19,7 +19,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/Editors/Kitbashing/Test.KitbashEditor/Test.KitbashEditor.csproj b/Editors/Kitbashing/Test.KitbashEditor/Test.KitbashEditor.csproj
index dd39a27ab..f62faa80a 100644
--- a/Editors/Kitbashing/Test.KitbashEditor/Test.KitbashEditor.csproj
+++ b/Editors/Kitbashing/Test.KitbashEditor/Test.KitbashEditor.csproj
@@ -18,7 +18,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/Editors/SimpleAnimationEditors/AnimationPack/Converters/AnimationBinWh3FileToXmlConverter.cs b/Editors/SimpleAnimationEditors/AnimationPack/Converters/AnimationBinWh3FileToXmlConverter.cs
index e29b910a1..0a19399a1 100644
--- a/Editors/SimpleAnimationEditors/AnimationPack/Converters/AnimationBinWh3FileToXmlConverter.cs
+++ b/Editors/SimpleAnimationEditors/AnimationPack/Converters/AnimationBinWh3FileToXmlConverter.cs
@@ -237,7 +237,7 @@ private bool IsAnimFile(string file, IPackFileService pfs, ErrorList errorList,
return false;
}
- var data = theFile.DataSource.ReadData(20);
+ var data = theFile.DataSource.PeekData(20);
var headerIsReallyAnimFile = data[0] == 0x06 || data[0] == 0x07 || data[0] == 0x08; //check if version is not 6 7 8 (or just check if it's 2)
return endsWithAnim && headerIsReallyAnimFile;
}
@@ -252,7 +252,8 @@ private bool IsAnimMetaFile(string file, IPackFileService pfs, ErrorList errorLi
errorList.Warning(animationSlot, $"Unable to locate {file} for {animationSlot}");
return false;
}
- var data = theFile.DataSource.ReadData(20);
+
+ var data = theFile.DataSource.PeekData(20);
var headerIsReallyAnimMetaFile = data[0] == 0x02; //check if version is not 6 7 8 (or just check if it's 2)
return endsWithDotMeta && headerIsReallyAnimMetaFile;
}
@@ -268,7 +269,7 @@ private bool IsSndMetaFile(string file, IPackFileService pfs, ErrorList errorLis
return false;
}
- var data = theFile.DataSource.ReadData(20);
+ var data = theFile.DataSource.PeekData(20);
var headerIsReallyAnimMetaFile = data[0] == 0x02; //check if version is not 6 7 8 (or just check if it's 2)
return endsWithDotMeta && headerIsReallyAnimMetaFile;
}
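The converters above only need the first header byte to decide whether an entry really is an `.anim` or `.meta` file, which is why they move from `ReadData(20)` to the new `PeekData(20)`. A minimal sketch of that check, assuming the `IDataSource` interface introduced further down (namespace and helper names are illustrative, not part of the change):

```csharp
using Shared.Core.PackFiles.Models; // assumed namespace for IDataSource

public static class AnimHeaderSniffing
{
    // Peek a few bytes instead of materialising the whole (possibly compressed) entry.
    public static bool LooksLikeAnimFile(IDataSource dataSource)
    {
        var header = dataSource.PeekData(20);
        return header[0] == 0x06 || header[0] == 0x07 || header[0] == 0x08; // known .anim versions
    }

    public static bool LooksLikeAnimMetaFile(IDataSource dataSource)
    {
        var header = dataSource.PeekData(20);
        return header[0] == 0x02; // known .meta version
    }
}
```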
diff --git a/Editors/SkeletonEditor/Test.SkeletonEditor/Test.SkeletonEditor.csproj b/Editors/SkeletonEditor/Test.SkeletonEditor/Test.SkeletonEditor.csproj
index 0871567f1..59d79737d 100644
--- a/Editors/SkeletonEditor/Test.SkeletonEditor/Test.SkeletonEditor.csproj
+++ b/Editors/SkeletonEditor/Test.SkeletonEditor/Test.SkeletonEditor.csproj
@@ -19,7 +19,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/GameWorld/View3D/Services/SkeletonAnimationLookUpHelper.cs b/GameWorld/View3D/Services/SkeletonAnimationLookUpHelper.cs
index 2f8c1f039..040b76903 100644
--- a/GameWorld/View3D/Services/SkeletonAnimationLookUpHelper.cs
+++ b/GameWorld/View3D/Services/SkeletonAnimationLookUpHelper.cs
@@ -111,7 +111,7 @@ void LoadFromPackFileContainer(PackFileContainer packFileContainer)
Parallel.For(0, allAnimsOtherFiles.Count, index =>
{
var animation = allAnimations[index];
- FileDiscovered(animation.Pack.DataSource.ReadData(100), packFileContainer, animation.FileName, ref skeletonFileNameList, ref animationList);
+ FileDiscovered(animation.Pack.DataSource.PeekData(100), packFileContainer, animation.FileName, ref skeletonFileNameList, ref animationList);
});
foreach (var skeleton in skeletonFileNameList)
@@ -135,7 +135,8 @@ void FileDiscovered(byte[] byteChunk, PackFileContainer container, string fullPa
"animations\\battle\\humanoid13b\\golgfag\\docking\\hu13b_golgfag_docking_armed_02.anim",
"animations\\battle\\humanoid13\\ogre\\rider\\hq3b_stonehorn_wb\\sword_and_crossbow\\missile_action\\crossbow\\hu13_hq3b_swc_rider1_shoot_back_crossbow_01.anim",
"animations\\battle\\humanoid13\\ogre\\rider\\hq3b_stonehorn_wb\\sword_and_crossbow\\missile_action\\crossbow\\hu13_hq3b_swc_rider1_reload_crossbow_01.anim",
- "animations\\battle\\humanoid13\\ogre\\rider\\hq3b_stonehorn_wb\\sword_and_crossbow\\missile_action\\crossbow\\hu13_hq3b_sp_rider1_shoot_ready_crossbow_01.anim"
+ "animations\\battle\\humanoid13\\ogre\\rider\\hq3b_stonehorn_wb\\sword_and_crossbow\\missile_action\\crossbow\\hu13_hq3b_sp_rider1_shoot_ready_crossbow_01.anim",
+ "animations\\battle\\humanoid01c\\sayl_staff_and_skull\\stand\\props\\hu1c_sayl_staff_and_skull_staff_stand_idle_02.anim"
};
if (brokenFiles.Contains(fullPath))
{
diff --git a/Shared/GameFiles/Animation/AnimationFile.cs b/Shared/GameFiles/Animation/AnimationFile.cs
index ac3e4d847..65379a40b 100644
--- a/Shared/GameFiles/Animation/AnimationFile.cs
+++ b/Shared/GameFiles/Animation/AnimationFile.cs
@@ -128,7 +128,7 @@ public AnimationV8OptimizationData(uint boneCount)
public static AnimationHeader GetAnimationHeader(PackFile file)
{
- var data = file.DataSource.ReadData(100);
+ var data = file.DataSource.PeekData(100);
try
{
return GetAnimationHeader(new ByteChunk(data));
diff --git a/Shared/SharedCore/ErrorHandling/PackFileLog.cs b/Shared/SharedCore/ErrorHandling/PackFileLog.cs
index d813d869a..3a51adf93 100644
--- a/Shared/SharedCore/ErrorHandling/PackFileLog.cs
+++ b/Shared/SharedCore/ErrorHandling/PackFileLog.cs
@@ -3,21 +3,15 @@
namespace Shared.Core.ErrorHandling
{
- public class CompressionStats
+ public class CompressionInformation(long diskSize = 0, long uncompressedSize = 0)
{
- public long DiskSize { get; set; }
- public long UncompressedSize { get; set; }
+ public long DiskSize { get; set; } = diskSize;
+ public long UncompressedSize { get; set; } = uncompressedSize;
- public CompressionStats(long diskSize = 0, long uncompressedSize = 0)
+ public void Add(CompressionInformation compressionInformation)
{
- DiskSize = diskSize;
- UncompressedSize = uncompressedSize;
- }
-
- public void Add(CompressionStats stat)
- {
- DiskSize += stat.DiskSize;
- UncompressedSize += stat.UncompressedSize;
+ DiskSize += compressionInformation.DiskSize;
+ UncompressedSize += compressionInformation.UncompressedSize;
}
}
@@ -25,22 +19,19 @@ public static class PackFileLog
{
private static readonly ILogger s_logger = Logging.CreateStatic(typeof(PackFileLog));
- public static Dictionary<CompressionFormat, CompressionStats> GetCompressionStats(PackFileContainer container)
+ public static Dictionary<CompressionFormat, CompressionInformation> GetCompressionInformation(PackFileContainer container)
{
- var stats = new Dictionary<CompressionFormat, CompressionStats>();
+ var compressionInformation = new Dictionary<CompressionFormat, CompressionInformation>();
foreach (var packFile in container.FileList.Values)
{
if (packFile.DataSource is PackedFileSource source)
{
- var format = source.IsCompressed
- ? source.CompressionFormat
- : CompressionFormat.None;
-
- if (!stats.TryGetValue(format, out var totals))
+ var compressionFormat = source.IsCompressed ? source.CompressionFormat : CompressionFormat.None;
+ if (!compressionInformation.TryGetValue(compressionFormat, out var totals))
{
- totals = new CompressionStats();
- stats[format] = totals;
+ totals = new CompressionInformation();
+ compressionInformation[compressionFormat] = totals;
}
totals.DiskSize += source.Size;
@@ -48,77 +39,98 @@ public static Dictionary GetCompressionStat
}
}
- return stats;
+ return compressionInformation;
}
public static void LogPackCompression(PackFileContainer container)
{
- var stats = GetCompressionStats(container);
+ var compressionInformation = GetCompressionInformation(container);
var totalFiles = container.FileList.Count;
- var packSizeFmt = FormatSize(container.OriginalLoadByteSize);
+ var packSize = FormatSize(container.OriginalLoadByteSize);
+
+ var loadingPart = $"Loading {container.Name}.pack ({totalFiles} files, {packSize})";
- var loadingPart = $"Loading {container.Name}.pack ({totalFiles} files, {packSizeFmt})";
+ var fileCountsByCompressionFormat = new Dictionary<CompressionFormat, int>();
+ var fileTypeCountsByCompressionFormat = new Dictionary<CompressionFormat, Dictionary<string, int>>();
- var fileCounts = new Dictionary<CompressionFormat, int>();
- foreach (var pf in container.FileList.Values)
+ foreach (var packFile in container.FileList.Values)
{
- if (pf.DataSource is PackedFileSource src)
+ if (packFile.DataSource is not PackedFileSource packedFileSource)
+ continue;
+
+ var compressionFormat = packedFileSource.IsCompressed ? packedFileSource.CompressionFormat : CompressionFormat.None;
+
+ if (!fileCountsByCompressionFormat.TryGetValue(compressionFormat, out var fileCount))
+ fileCountsByCompressionFormat[compressionFormat] = 1;
+ else
+ fileCountsByCompressionFormat[compressionFormat] = fileCount + 1;
+
+ var fileType = string.IsNullOrWhiteSpace(packFile.Extension) ? "no_extension" : packFile.Extension;
+
+ if (!fileTypeCountsByCompressionFormat.TryGetValue(compressionFormat, out var fileTypeCounts))
{
- var fmt = src.IsCompressed
- ? src.CompressionFormat
- : CompressionFormat.None;
-
- if (!fileCounts.TryGetValue(fmt, out var cnt))
- fileCounts[fmt] = 1;
- else
- fileCounts[fmt] = cnt + 1;
+ fileTypeCounts = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
+ fileTypeCountsByCompressionFormat[compressionFormat] = fileTypeCounts;
}
+
+ if (!fileTypeCounts.TryGetValue(fileType, out var fileTypeCount))
+ fileTypeCounts[fileType] = 1;
+ else
+ fileTypeCounts[fileType] = fileTypeCount + 1;
}
- var segments = stats
- .OrderBy(kvp => kvp.Key)
- .Select(kvp =>
+ var segments = new List<string>();
+
+ foreach (var compressionEntry in compressionInformation.OrderBy(compressionEntry => compressionEntry.Key))
+ {
+ var compressionFormat = compressionEntry.Key;
+ var count = fileCountsByCompressionFormat.TryGetValue(compressionFormat, out var fileCount) ? fileCount : 0;
+ var diskSize = FormatSize(compressionEntry.Value.DiskSize);
+
+ var fileSizes = compressionFormat == CompressionFormat.None
+ ? $"Disk Size: {diskSize}"
+ : $"Disk Size: {diskSize}, Uncompressed Size: {FormatSize(compressionEntry.Value.UncompressedSize)}";
+
+ var fileTypes = string.Empty;
+ if (fileTypeCountsByCompressionFormat.TryGetValue(compressionFormat, out var fileTypeCounts) && fileTypeCounts.Count > 0)
{
- var fmt = kvp.Key;
- var count = fileCounts.TryGetValue(fmt, out var c) ? c : 0;
- var disk = FormatSize(kvp.Value.DiskSize);
+ var fileTypeSegments = new List<string>();
- if (fmt == CompressionFormat.None)
- return $"{fmt}: {count} files, {disk} (Disk Size)";
+ foreach (var fileTypeEntry in fileTypeCounts.OrderBy(fileTypeEntry => fileTypeEntry.Key, StringComparer.OrdinalIgnoreCase))
+ fileTypeSegments.Add($"{fileTypeEntry.Key} ({fileTypeEntry.Value})");
- var unc = FormatSize(kvp.Value.UncompressedSize);
- return $"{fmt}: {count} files, {disk} (Disk Size), {unc} (Uncompressed Size)";
- })
- .ToList();
+ fileTypes = $": {string.Join(", ", fileTypeSegments)}";
+ }
+
+ segments.Add($"{compressionFormat} ({count} files, {fileSizes}){fileTypes}");
+ }
- var compressionPart = $"File Compression – {string.Join(" | ", segments)}";
- s_logger.Here().Information($"{loadingPart} | {compressionPart}");
+ s_logger.Here().Information($"{loadingPart} | {string.Join(" | ", segments)}");
}
- public static void LogPacksCompression(IDictionary<CompressionFormat, CompressionStats> globalStats)
+ public static void LogPacksCompression(IDictionary<CompressionFormat, CompressionInformation> allCompressionInformation)
{
- var segments = globalStats
- .OrderBy(kvp => kvp.Key)
- .Select(kvp =>
- {
- var format = kvp.Key;
- var diskFormatted = FormatSize(kvp.Value.DiskSize);
+ var segments = new List<string>();
- if (format == CompressionFormat.None)
- return $"{format}: {diskFormatted} (Disk Size)";
+ foreach (var compressionEntry in allCompressionInformation.OrderBy(compressionEntry => compressionEntry.Key))
+ {
+ var compressionFormat = compressionEntry.Key;
+ var diskSize = FormatSize(compressionEntry.Value.DiskSize);
- var uncompressedFormatted = FormatSize(kvp.Value.UncompressedSize);
- return $"{format}: {diskFormatted} (Disk Size), {uncompressedFormatted} (Uncompressed Size)";
- })
- .ToList();
+ if (compressionFormat == CompressionFormat.None)
+ {
+ segments.Add($"{compressionFormat}: {diskSize} (Disk Size)");
+ continue;
+ }
- var totalDisk = globalStats.Values.Sum(stat => stat.DiskSize);
- var totalUncompressed = globalStats.Values.Sum(stat => stat.UncompressedSize);
+ var uncompressedSize = FormatSize(compressionEntry.Value.UncompressedSize);
+ segments.Add($"{compressionFormat}: {diskSize} (Disk Size), {uncompressedSize} (Uncompressed Size)");
+ }
- var totalDiskFormatted = FormatSize(totalDisk);
- var totalUncompressedFormatted = FormatSize(totalUncompressed);
+ var totalDiskSize = FormatSize(allCompressionInformation.Values.Sum(compressionInformation => compressionInformation.DiskSize));
+ var totalUncompressedSize = FormatSize(allCompressionInformation.Values.Sum(compressionInformation => compressionInformation.UncompressedSize));
- var totalSegment = $"Total: {totalDiskFormatted} (Disk Size), {totalUncompressedFormatted} (Uncompressed Size)";
+ var totalSegment = $"Total: {totalDiskSize} (Disk Size), {totalUncompressedSize} (Uncompressed Size)";
var summary = string.Join(" | ", segments.Append(totalSegment));
s_logger.Here().Information($"Size of compressed files in all packs by format - {summary}");
diff --git a/Shared/SharedCore/PackFiles/Models/DataSource.cs b/Shared/SharedCore/PackFiles/Models/DataSource.cs
index 03b64838d..9da1994c0 100644
--- a/Shared/SharedCore/PackFiles/Models/DataSource.cs
+++ b/Shared/SharedCore/PackFiles/Models/DataSource.cs
@@ -13,7 +13,7 @@ public interface IDataSource
{
long Size { get; }
byte[] ReadData();
- byte[] ReadData(int size);
+ byte[] PeekData(int size);
ByteChunk ReadDataAsChunk();
}
@@ -22,19 +22,25 @@ public class FileSystemSource : IDataSource
public long Size { get; private set; }
protected string filepath;
+
public FileSystemSource(string filepath)
: base()
{
- Size = new FileInfo(filepath).Length;
+ var size = new FileInfo(filepath).Length;
+ if (size > uint.MaxValue)
+ throw new InvalidOperationException($"This file's size ({size:N0}) is too large. The maximum file size is {uint.MaxValue:N0}.");
+
+ Size = (uint)size;
this.filepath = filepath;
}
+
public byte[] ReadData()
{
return File.ReadAllBytes(filepath);
}
- public byte[] ReadData(int size)
+ public byte[] PeekData(int size)
{
using (var reader = new BinaryReader(new FileStream(filepath, FileMode.Open)))
{
@@ -55,17 +61,19 @@ public class MemorySource : IDataSource
public long Size { get; private set; }
private byte[] data;
+
public MemorySource(byte[] data)
{
Size = data.Length;
this.data = data;
}
+
public byte[] ReadData()
{
return data;
}
- public byte[] ReadData(int size)
+ public byte[] PeekData(int size)
{
var output = new byte[size];
Array.Copy(data, 0, output, 0, size);
@@ -77,6 +85,7 @@ public static MemorySource FromFile(string path)
{
return new MemorySource(File.ReadAllBytes(path));
}
+
public ByteChunk ReadDataAsChunk()
{
return new ByteChunk(ReadData());
@@ -116,32 +125,56 @@ public PackedFileSource(
public byte[] ReadData()
{
var data = new byte[Size];
- using (Stream stream = File.Open(_parent.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
+
+ using (var stream = File.Open(_parent.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
stream.Seek(Offset, SeekOrigin.Begin);
- stream.Read(data, 0, data.Length);
+ stream.ReadExactly(data);
}
if (IsEncrypted)
data = PackFileEncryption.Decrypt(data);
+
if (IsCompressed)
- data = PackFileCompression.Decompress(data);
+ {
+ data = PackFileCompression.Decompress(data, (int)UncompressedSize, CompressionFormat);
+ if (data.Length != UncompressedSize)
+ throw new InvalidDataException($"Decompressed bytes {data.Length:N0} does not match the expected uncompressed bytes {UncompressedSize:N0}.");
+ }
+
return data;
}
- public byte[] ReadData(int size)
+ public byte[] PeekData(int size)
{
- var data = new byte[size];
- using (Stream stream = File.Open(_parent.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
+ byte[] data;
+
+ using (var stream = File.Open(_parent.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
stream.Seek(Offset, SeekOrigin.Begin);
- stream.Read(data, 0, data.Length);
- }
- if (IsEncrypted)
- data = PackFileEncryption.Decrypt(data);
- if (IsCompressed)
- data = PackFileCompression.Decompress(data);
+ if (!IsEncrypted && !IsCompressed)
+ {
+ data = new byte[size];
+ stream.ReadExactly(data);
+ }
+ else
+ {
+ data = new byte[Size];
+ stream.ReadExactly(data);
+
+ if (IsEncrypted)
+ data = PackFileEncryption.Decrypt(data);
+
+ if (IsCompressed)
+ {
+ data = PackFileCompression.Decompress(data, size, CompressionFormat);
+ if (data.Length != size)
+ throw new InvalidDataException($"Decompressed bytes {data.Length:N0} does not match the expected uncompressed bytes {size:N0}.");
+ }
+ }
+ }
+
return data;
}
@@ -149,26 +182,34 @@ public byte[] ReadData(Stream knownStream)
{
var data = new byte[Size];
knownStream.Seek(Offset, SeekOrigin.Begin);
- knownStream.Read(data, 0, (int)Size);
+ knownStream.ReadExactly(data, 0, (int)Size);
if (IsEncrypted)
data = PackFileEncryption.Decrypt(data);
+
if (IsCompressed)
- data = PackFileCompression.Decompress(data);
+ {
+ data = PackFileCompression.Decompress(data, (int)UncompressedSize, CompressionFormat);
+ if (data.Length != UncompressedSize)
+ throw new InvalidDataException($"Decompressed bytes {data.Length:N0} does not match the expected uncompressed bytes {UncompressedSize:N0}.");
+ }
+
return data;
}
public byte[] ReadDataWithoutDecompressing()
{
var data = new byte[Size];
- using (Stream stream = File.Open(_parent.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
+
+ using (var stream = File.Open(_parent.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
stream.Seek(Offset, SeekOrigin.Begin);
- stream.Read(data, 0, data.Length);
+ stream.ReadExactly(data);
}
if (IsEncrypted)
data = PackFileEncryption.Decrypt(data);
+
return data;
}
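The behavioural point of the new `PeekData` on `PackedFileSource` is that a compressed or encrypted entry cannot be sliced on disk: the whole stored payload is read, but only the first `size` bytes are decompressed via the new `Decompress(data, outputSize, format)` overload. A hedged caller-side sketch (the `packFile` parameter is a placeholder for any entry loaded from a pack; namespace assumed from the file layout):

```csharp
using System.Linq;
using Shared.Core.PackFiles.Models; // assumed namespace for PackFile

public static class PeekDataSketch
{
    public static void ShowPeekVersusRead(PackFile packFile)
    {
        // Cheap header probe: only the first 100 bytes come back, even for compressed entries.
        var header = packFile.DataSource.PeekData(100);

        // Full read: for a compressed entry this decompresses to UncompressedSize bytes.
        var all = packFile.DataSource.ReadData();

        System.Diagnostics.Debug.Assert(header.Length == 100);
        System.Diagnostics.Debug.Assert(all.Take(100).SequenceEqual(header));
    }
}
```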
diff --git a/Shared/SharedCore/PackFiles/Models/PackFileContainer.cs b/Shared/SharedCore/PackFiles/Models/PackFileContainer.cs
index c722d5a4c..44204feac 100644
--- a/Shared/SharedCore/PackFiles/Models/PackFileContainer.cs
+++ b/Shared/SharedCore/PackFiles/Models/PackFileContainer.cs
@@ -3,9 +3,9 @@
namespace Shared.Core.PackFiles.Models
{
- public record PackFileWriteInfo(
+ public record PackFileWriteInformation(
PackFile PackFile,
- long FileSizeMetadataPosition,
+ long SizePosition,
CompressionFormat CurrentCompressionFormat,
CompressionFormat IntendedCompressionFormat);
@@ -56,13 +56,12 @@ public void SaveToByteArray(BinaryWriter writer, GameInformation gameInformation
Header.FileCount = (uint)FileList.Count;
PackFileSerializer.WriteHeader(Header, (uint)fileNamesOffset, writer);
- var filesToWrite = new List<PackFileWriteInfo>();
+ var filesToWrite = new List<PackFileWriteInformation>();
// Write file metadata
foreach (var file in sortedFiles)
{
var packFile = file.Value;
- var fileSize = (int)packFile.DataSource.Size;
// Determine compression info
var currentCompressionFormat = CompressionFormat.None;
@@ -73,7 +72,7 @@ public void SaveToByteArray(BinaryWriter writer, GameInformation gameInformation
var shouldCompress = intendedCompressionFormat != CompressionFormat.None;
// File size placeholder (rewritten later)
- var fileSizePosition = writer.BaseStream.Position;
+ var sizePosition = writer.BaseStream.Position;
writer.Write(0);
// Timestamp
@@ -91,11 +90,7 @@ public void SaveToByteArray(BinaryWriter writer, GameInformation gameInformation
// Zero terminator
writer.Write((byte)0);
- filesToWrite.Add(new PackFileWriteInfo(
- packFile,
- fileSizePosition,
- currentCompressionFormat,
- intendedCompressionFormat));
+ filesToWrite.Add(new PackFileWriteInformation(packFile, sizePosition, currentCompressionFormat, intendedCompressionFormat));
}
var packedFileSourceParent = new PackedFileSourceParent { FilePath = SystemFilePath };
@@ -105,41 +100,45 @@ public void SaveToByteArray(BinaryWriter writer, GameInformation gameInformation
{
var packFile = file.PackFile;
byte[] data;
- uint uncompressedFileSize = 0;
+ uint uncompressedSize = 0;
- // Read the data
+ // Determine compression info
var shouldCompress = file.IntendedCompressionFormat != CompressionFormat.None;
var isCorrectCompressionFormat = file.CurrentCompressionFormat == file.IntendedCompressionFormat;
+
+ // Read the data
if (shouldCompress && !isCorrectCompressionFormat)
{
// Decompress the data
var uncompressedData = packFile.DataSource.ReadData();
- uncompressedFileSize = (uint)uncompressedData.Length;
+ uncompressedSize = (uint)uncompressedData.Length;
// Compress the data into the right format
var compressedData = PackFileCompression.Compress(uncompressedData, file.IntendedCompressionFormat);
data = compressedData;
+
+ // Validate new compression
+ var decompressedData = PackFileCompression.Decompress(compressedData, uncompressedData.Length, file.IntendedCompressionFormat);
+ if (decompressedData.Length != uncompressedData.Length)
+ throw new InvalidDataException($"Decompressed bytes {decompressedData.Length:N0} does not match the expected uncompressed bytes {uncompressedData.Length:N0}.");
}
else if (packFile.DataSource is PackedFileSource packedFileSource && isCorrectCompressionFormat)
{
- // The data is already in the right format so just get the compressed data
- uncompressedFileSize = packedFileSource.UncompressedSize;
- var compressedData = packedFileSource.ReadDataWithoutDecompressing();
- data = compressedData;
+ // The data is already in the right format, so just take the stored bytes as-is
+ uncompressedSize = packedFileSource.UncompressedSize;
+ data = packedFileSource.ReadDataWithoutDecompressing();
}
else
data = packFile.DataSource.ReadData();
- var fileSize = (uint)data.Length;
-
// Write the data
var offset = writer.BaseStream.Position;
writer.Write(data);
- // Patch the file size metadata placeholder
+ // Patch the size placeholder at the position stored earlier
var currentPosition = writer.BaseStream.Position;
- writer.BaseStream.Position = file.FileSizeMetadataPosition;
- writer.Write(fileSize);
+ writer.BaseStream.Position = file.SizePosition;
+ writer.Write(data.Length);
writer.BaseStream.Position = currentPosition;
// We do not encrypt
@@ -149,11 +148,11 @@ public void SaveToByteArray(BinaryWriter writer, GameInformation gameInformation
packFile.DataSource = new PackedFileSource(
packedFileSourceParent,
offset,
- fileSize,
+ data.Length,
isEncrypted,
shouldCompress,
file.IntendedCompressionFormat,
- uncompressedFileSize);
+ uncompressedSize);
}
}
}
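`SaveToByteArray` keeps the earlier two-pass layout: a 4-byte size placeholder is written with each file's metadata, its position is recorded in `PackFileWriteInformation.SizePosition`, and the real length is patched in once the payload has been written. A minimal, self-contained sketch of that pattern, reduced to a single entry with sample data:

```csharp
using System.IO;

public static class PlaceholderPatchSketch
{
    public static byte[] WriteEntry(byte[] payload)
    {
        using var stream = new MemoryStream();
        using var writer = new BinaryWriter(stream);

        var sizePosition = writer.BaseStream.Position; // remember where the placeholder lives
        writer.Write(0);                               // 4-byte size placeholder

        writer.Write(payload);                         // the (possibly compressed) file data

        var end = writer.BaseStream.Position;
        writer.BaseStream.Position = sizePosition;     // jump back and patch the real size
        writer.Write(payload.Length);
        writer.BaseStream.Position = end;              // restore so the next entry appends correctly

        return stream.ToArray();
    }
}
```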
diff --git a/Shared/SharedCore/PackFiles/PackFileCompression.cs b/Shared/SharedCore/PackFiles/PackFileCompression.cs
index 5d766c13f..f5604cd29 100644
--- a/Shared/SharedCore/PackFiles/PackFileCompression.cs
+++ b/Shared/SharedCore/PackFiles/PackFileCompression.cs
@@ -1,6 +1,7 @@
-using EasyCompressor;
-using K4os.Compression.LZ4.Encoders;
+using K4os.Compression.LZ4.Encoders;
using K4os.Compression.LZ4.Streams;
+using SevenZip;
+using SevenZip.Compression.LZMA;
using Shared.Core.Settings;
using ZstdSharp;
using ZstdSharp.Unsafe;
@@ -54,19 +55,9 @@ public enum CompressionFormat
public static class PackFileCompression
{
- // LZMA alone doesn't have a defined magic number, but it always starts with one of these, depending on the compression level
- private static readonly uint[] s_magicNumbersLzma = [
- 0x0100_005D,
- 0x1000_005D,
- 0x0800_005D,
- 0x2000_005D,
- 0x4000_005D,
- 0x8000_005D,
- 0x0000_005D,
- 0x0400_005D,
- ];
- private static readonly uint s_magicNumberLz4 = 0x184D_2204;
- private static readonly uint s_magicNumberZstd = 0xfd2f_b528;
+ private const byte LzmaPropertiesIdentifier = 0x5D;
+ private const uint Lz4MagicNumber = 0x184D_2204;
+ private const uint ZstdMagicNumber = 0xfd2f_b528;
// CA generally compress file types in specific formats, presumably because they compress better in that format.
// Sometimes CA compress file types in various formats (though predominantly in one format), presumably by
@@ -114,90 +105,73 @@ public static class PackFileCompression
".parsed",
];
- public static byte[] Decompress(byte[] data)
+ public static byte[] Decompress(byte[] data, int outputSize, CompressionFormat compressionFormat)
{
- var result = Array.Empty<byte>();
- if (data == null || data.Length == 0)
- return result;
-
using var stream = new MemoryStream(data, false);
using var reader = new BinaryReader(stream);
- // Read the header and get what we need
var uncompressedSize = reader.ReadUInt32();
- var magicNumber = reader.ReadUInt32();
- var compressionFormat = GetCompressionFormat(magicNumber);
- stream.Seek(-4, SeekOrigin.Current);
+ if (outputSize > uncompressedSize)
+ throw new InvalidDataException($"Output size {outputSize:N0} cannot be greater than the uncompressed size {uncompressedSize:N0}.");
if (compressionFormat == CompressionFormat.Zstd)
- return DecompressZstd(reader, uncompressedSize);
+ return DecompressZstd(reader.BaseStream, outputSize);
if (compressionFormat == CompressionFormat.Lz4)
- return DecompressLz4(reader, uncompressedSize);
+ return DecompressLz4(reader.BaseStream, outputSize);
else if (compressionFormat == CompressionFormat.Lzma1)
- result = DecompressLzma(data, uncompressedSize);
- else if (compressionFormat == CompressionFormat.None)
- return data;
-
- if (result.Length != uncompressedSize)
- throw new InvalidDataException($"Expected {uncompressedSize:N0} bytes after decompression, but got {result.Length:N0}.");
+ return DecompressLzma(reader.BaseStream, outputSize);
+ else
+ throw new InvalidDataException("Uh oh, the data is either not compressed or has some unknown compression format.");
+ }
- return result;
+ private static byte[] DecompressZstd(Stream compressedDataStream, int outputSize)
+ {
+ var output = new byte[outputSize];
+ using var decompressionStream = new DecompressionStream(compressedDataStream);
+ ReadExactly(decompressionStream, output, 0, outputSize);
+ return output;
}
- private static byte[] DecompressZstd(BinaryReader reader, uint uncompressedSize)
+ private static byte[] DecompressLz4(Stream compressedDataStream, int outputSize)
{
- var buffer = new byte[uncompressedSize];
- var output = new MemoryStream(buffer);
- using var decompressionStream = new DecompressionStream(reader.BaseStream);
- decompressionStream.CopyTo(output);
- return output.ToArray();
+ var output = new byte[outputSize];
+ using var decompressionStream = new LZ4DecoderStream(compressedDataStream, i => new LZ4ChainDecoder(i.BlockSize, 0));
+ ReadExactly(decompressionStream, output, 0, outputSize);
+ return output;
}
- private static byte[] DecompressLz4(BinaryReader reader, uint uncompressedSize)
+ private static byte[] DecompressLzma(Stream stream, int outputSize)
{
- var buffer = new byte[uncompressedSize];
- var output = new MemoryStream(buffer);
- var decompressor = new LZ4DecoderStream(reader.BaseStream, i => new LZ4ChainDecoder(i.BlockSize, 0));
- decompressor.CopyTo(output);
- return output.ToArray();
+ // Read the property bytes
+ var lzmaPropertiesSize = 5;
+ var lzmaProperties = new byte[lzmaPropertiesSize];
+ ReadExactly(stream, lzmaProperties, 0, lzmaPropertiesSize);
+
+ var remainingInputSize = stream.Length - stream.Position;
+
+ var output = new byte[outputSize];
+ using var outputStream = new MemoryStream(output, 0, outputSize, writable: true, publiclyVisible: true);
+
+ var decoder = new Decoder();
+ decoder.SetDecoderProperties(lzmaProperties);
+ decoder.Code(stream, outputStream, remainingInputSize, outputSize, null);
+
+ if (outputStream.Position != outputSize)
+ throw new InvalidDataException($"Expected uncompressed bytes {outputSize:N0} but only received {outputStream.Position:N0} decompressed bytes.");
+
+ return output;
}
- private static byte[] DecompressLzma(byte[] data, uint uncompressedSize)
+ private static void ReadExactly(Stream stream, byte[] buffer, int offset, int count)
{
- var uncompressedSizeFieldSize = sizeof(uint);
- var headerDataLength = 5;
- var injectedSizeLength = sizeof(ulong);
-
- // Compute all the offsets
- var headerStart = uncompressedSizeFieldSize;
- var headerEnd = headerStart + headerDataLength;
- var footerStart = headerEnd;
- var minTotalSize = footerStart;
-
- // LZMA1 headers have 13 bytes, but we only have 9 due to using a u32 size
- if (data.Length < minTotalSize)
- throw new InvalidDataException("File too small to be valid LZMA.");
-
- // Unlike other formats, in this one we need to inject the uncompressed size in the file header otherwise it won't be a valid lzma file
- using var primary = new MemoryStream(data.Length + injectedSizeLength);
- primary.Write(data, headerStart, headerDataLength);
- primary.Write(BitConverter.GetBytes((ulong)uncompressedSize), 0, injectedSizeLength);
- primary.Write(data, footerStart, data.Length - footerStart);
- primary.Position = 0;
-
- try
- {
- return LZMACompressor.Shared.Decompress(primary.ToArray());
- }
- catch
+ var totalBytesRead = 0;
+ while (totalBytesRead < count)
{
- // Some files may still fail so fall back to a unknown size (u64::MAX) instead
- using var fallback = new MemoryStream(data.Length + injectedSizeLength);
- fallback.Write(data, headerStart, headerDataLength);
- fallback.Write(BitConverter.GetBytes(ulong.MaxValue), 0, injectedSizeLength);
- fallback.Write(data, footerStart, data.Length - footerStart);
- fallback.Position = 0;
- return LZMACompressor.Shared.Decompress(fallback.ToArray());
+ var bytesRead = stream.Read(buffer, offset + totalBytesRead, count - totalBytesRead);
+ if (bytesRead == 0)
+ throw new InvalidDataException($"Requested {count:N0} bytes but only received {totalBytesRead:N0} bytes.");
+
+ totalBytesRead += bytesRead;
}
}
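A sketch of how the pieces above fit together for an LZMA1 entry, assuming the SevenZip LZMA SDK decoder already referenced in this file; the helper name is illustrative. The pack layout is a u32 uncompressed size followed by the 5 LZMA property bytes and the raw stream, which is why the decoder is driven with an explicit output size rather than the 8-byte size field a standalone .lzma file would carry:

```csharp
using System.IO;
using SevenZip.Compression.LZMA;

public static class LzmaEntrySketch
{
    // Pack entries store: [u32 uncompressed size][5 LZMA property bytes][raw LZMA stream].
    public static byte[] Decompress(byte[] entry)
    {
        using var stream = new MemoryStream(entry, false);
        using var reader = new BinaryReader(stream);

        var uncompressedSize = reader.ReadUInt32(); // written by CompressLzma1 below
        var properties = reader.ReadBytes(5);       // lc/lp/pb flags + dictionary size

        var output = new byte[uncompressedSize];
        using var outputStream = new MemoryStream(output);

        var decoder = new Decoder();
        decoder.SetDecoderProperties(properties);
        decoder.Code(stream, outputStream, stream.Length - stream.Position, uncompressedSize, null);
        return output;
    }
}
```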
@@ -209,62 +183,80 @@ public static byte[] Compress(byte[] data, CompressionFormat compressionFormat)
return CompressLz4(data);
else if (compressionFormat == CompressionFormat.Lzma1)
return CompressLzma1(data);
- return data;
+ else
+ throw new InvalidDataException("Uh oh, the data either cannot be compressed or has some unknown compression format.");
}
private static byte[] CompressZstd(byte[] data)
{
using var stream = new MemoryStream();
- var uncompressedSize = data.Length;
- stream.Write(BitConverter.GetBytes((uint)uncompressedSize));
+ stream.Write(BitConverter.GetBytes((uint)data.Length));
using (var compressor = new CompressionStream(stream, 3, leaveOpen: true))
{
compressor.SetParameter(ZSTD_cParameter.ZSTD_c_contentSizeFlag, 1);
compressor.SetParameter(ZSTD_cParameter.ZSTD_c_checksumFlag, 1);
- compressor.SetPledgedSrcSize((ulong)uncompressedSize);
- compressor.Write(data, 0, uncompressedSize);
+ compressor.SetPledgedSrcSize((ulong)data.Length);
+ compressor.Write(data, 0, data.Length);
}
return stream.ToArray();
}
+
private static byte[] CompressLz4(byte[] data)
{
using var stream = new MemoryStream();
- var uncompressedSize = data.Length;
- stream.Write(BitConverter.GetBytes((uint)uncompressedSize));
+ stream.Write(BitConverter.GetBytes((uint)data.Length));
using (var encoder = LZ4Stream.Encode(stream, leaveOpen: true))
- encoder.Write(data, 0, uncompressedSize);
+ encoder.Write(data, 0, data.Length);
return stream.ToArray();
}
private static byte[] CompressLzma1(byte[] data)
{
- var compressedData = LZMACompressor.Shared.Compress(data);
- var compressedSize = compressedData.Length;
- if (compressedSize < 13)
- throw new InvalidDataException("Data cannot be compressed");
-
using var stream = new MemoryStream();
- var uncompressedSize = data.Length;
- stream.Write(BitConverter.GetBytes(uncompressedSize), 0, 4);
- stream.Write(compressedData, 0, 5);
- stream.Write(compressedData, 13, compressedSize - 13);
+ stream.Write(BitConverter.GetBytes(data.Length), 0, 4);
+
+ var encoder = new Encoder();
+ encoder.SetCoderProperties(
+ [
+ CoderPropID.DictionarySize,
+ CoderPropID.PosStateBits,
+ CoderPropID.LitContextBits,
+ CoderPropID.LitPosBits
+ ],
+ [
+ 0x0040_0000,
+ 2,
+ 3,
+ 0
+ ]);
+
+ // Write the property bytes
+ encoder.WriteCoderProperties(stream);
+
+ // Write the payload
+ using var input = new MemoryStream(data, writable: false);
+ encoder.Code(input, stream, input.Length, -1, null);
return stream.ToArray();
}
- public static CompressionFormat GetCompressionFormat(uint magicNumber)
+ public static CompressionFormat GetCompressionFormat(byte[] compressionFormatBytes)
{
- if (magicNumber == s_magicNumberZstd)
+ // Lzma1 is identified by the properties
+ if (compressionFormatBytes[0] == LzmaPropertiesIdentifier)
+ return CompressionFormat.Lzma1;
+
+ // Zstd and Lz4 are identified by their magic numbers
+ var magicNumber = BitConverter.ToUInt32(compressionFormatBytes);
+ if (magicNumber == ZstdMagicNumber)
return CompressionFormat.Zstd;
- else if (magicNumber == s_magicNumberLz4)
+ else if (magicNumber == Lz4MagicNumber)
return CompressionFormat.Lz4;
- else if (s_magicNumbersLzma.Contains(magicNumber))
- return CompressionFormat.Lzma1;
else
return CompressionFormat.None;
}
@@ -277,7 +269,7 @@ public static CompressionFormat GetCompressionFormat(GameInformation gameInforma
if (compressionFormats.All(compressionFormat => compressionFormat == CompressionFormat.None))
return CompressionFormat.None;
- // We use rootFolder for normal db tables because they don't have an extension
+ // We use the root folder for db tables because they don't have an extension
var isTable = firstFilePathPart == "db" || extension == ".loc";
var hasExtension = !string.IsNullOrEmpty(extension);
@@ -285,7 +277,7 @@ public static CompressionFormat GetCompressionFormat(GameInformation gameInforma
if (!isTable && !hasExtension)
return CompressionFormat.None;
- // Only compress tables in WH3 (and newer games?) as compresse tables are bugged in older games
+ // Only compress tables in WH3 (and newer games?) as compressed tables are bugged in older games
if (isTable && compressionFormats.Contains(CompressionFormat.Zstd) && gameInformation.Type == GameTypeEnum.Warhammer3)
return CompressionFormat.Zstd;
else if (isTable)
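Format detection now keys off the first payload bytes instead of a fixed list of LZMA magic numbers: after the u32 uncompressed size, an LZMA1 entry starts with the 0x5D properties byte, while LZ4 and Zstd entries start with their frame magic numbers. A small sketch of that probe over a whole entry, mirroring `GetCompressionFormat` above (helper name illustrative, namespace assumed from the file path):

```csharp
using System;
using Shared.Core.PackFiles; // assumed namespace for CompressionFormat

public static class FormatSniffSketch
{
    public static CompressionFormat Sniff(byte[] entry)
    {
        // Bytes 0..3 hold the little-endian uncompressed size; the probe starts at byte 4.
        if (entry[4] == 0x5D)
            return CompressionFormat.Lzma1;               // LZMA1 properties byte

        var magic = BitConverter.ToUInt32(entry, 4);
        if (magic == 0x184D_2204) return CompressionFormat.Lz4;  // LZ4 frame magic
        if (magic == 0xFD2F_B528) return CompressionFormat.Zstd; // Zstandard magic
        return CompressionFormat.None;
    }
}
```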
diff --git a/Shared/SharedCore/PackFiles/PackFileContainerLoader.cs b/Shared/SharedCore/PackFiles/PackFileContainerLoader.cs
index 23b95ddd7..e47a746e2 100644
--- a/Shared/SharedCore/PackFiles/PackFileContainerLoader.cs
+++ b/Shared/SharedCore/PackFiles/PackFileContainerLoader.cs
@@ -110,7 +110,7 @@ private static void AddFolderContentToPackFile(PackFileContainer container, stri
}
var packList = new List();
- var packsCompressionStats = new ConcurrentDictionary<CompressionFormat, CompressionStats>();
+ var packsCompressionStats = new ConcurrentDictionary<CompressionFormat, CompressionInformation>();
Parallel.ForEach(allCaPackFiles, packFilePath =>
{
@@ -124,11 +124,11 @@ private static void AddFolderContentToPackFile(PackFileContainer container, stri
packList.Add(pack);
PackFileLog.LogPackCompression(pack);
- var packCompressionStats = PackFileLog.GetCompressionStats(pack);
+ var packCompressionStats = PackFileLog.GetCompressionInformation(pack);
foreach (var kvp in packCompressionStats)
{
if (!packsCompressionStats.TryGetValue(kvp.Key, out var existingStats))
- packsCompressionStats[kvp.Key] = new CompressionStats(kvp.Value.DiskSize, kvp.Value.UncompressedSize);
+ packsCompressionStats[kvp.Key] = new CompressionInformation(kvp.Value.DiskSize, kvp.Value.UncompressedSize);
else
existingStats.Add(kvp.Value);
}
diff --git a/Shared/SharedCore/PackFiles/PackFileSerializer.cs b/Shared/SharedCore/PackFiles/PackFileSerializer.cs
index f0c45aa98..26eb802e8 100644
--- a/Shared/SharedCore/PackFiles/PackFileSerializer.cs
+++ b/Shared/SharedCore/PackFiles/PackFileSerializer.cs
@@ -81,8 +81,8 @@ public static PackFileContainer Load(string packFileSystemPath, BinaryReader rea
using var compressionStream = new MemoryStream(fileHeader, false);
using var compressionReader = new BinaryReader(compressionStream);
uncompressedSize = compressionReader.ReadUInt32();
- var magicNumber = compressionReader.ReadUInt32();
- compressionFormat = PackFileCompression.GetCompressionFormat(magicNumber);
+ var compressionFormatBytes = compressionReader.ReadBytes(4);
+ compressionFormat = PackFileCompression.GetCompressionFormat(compressionFormatBytes);
}
var packedFileSource = new PackedFileSource(packedFileSourceParent, offset, size, isEncrypted, isCompressed, compressionFormat, uncompressedSize);
diff --git a/Shared/SharedCore/Shared.Core.csproj b/Shared/SharedCore/Shared.Core.csproj
index 0b062eea5..cdb5b806d 100644
--- a/Shared/SharedCore/Shared.Core.csproj
+++ b/Shared/SharedCore/Shared.Core.csproj
@@ -7,35 +7,33 @@
-
-
-
+
+
+
-
+
-
-
+
+
-
-
+
+
-
-
+
+
-
-
diff --git a/Testing/E2EVerification/Test.E2EVerification.csproj b/Testing/E2EVerification/Test.E2EVerification.csproj
index a46a75c7f..0faefb240 100644
--- a/Testing/E2EVerification/Test.E2EVerification.csproj
+++ b/Testing/E2EVerification/Test.E2EVerification.csproj
@@ -27,7 +27,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/Testing/GameWorld.Core.Test/Test.GameWorld.Core.csproj b/Testing/GameWorld.Core.Test/Test.GameWorld.Core.csproj
index 691ef049e..2aa4a3a5c 100644
--- a/Testing/GameWorld.Core.Test/Test.GameWorld.Core.csproj
+++ b/Testing/GameWorld.Core.Test/Test.GameWorld.Core.csproj
@@ -19,7 +19,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/Testing/Shared.Core.Test/PackFiles/PackFileCompressionTests.cs b/Testing/Shared.Core.Test/PackFiles/PackFileCompressionTests.cs
index dc941a7aa..3f4eb249d 100644
--- a/Testing/Shared.Core.Test/PackFiles/PackFileCompressionTests.cs
+++ b/Testing/Shared.Core.Test/PackFiles/PackFileCompressionTests.cs
@@ -18,7 +18,7 @@ public void Setup()
_packFileService = new PackFileService(eventHub.Object);
_container = _packFileService.CreateNewPackFileContainer("EncryptedOutput", PackFileCAType.MOD, true);
- // Use files that aren't tiny so that they can actually be compressed rather than increase in size when being compressed
+ // Use files that are large enough for compression to be effective, as files that are too small may actually increase in size when compressed
List files = [
new("Directory_0", PackFile.CreateFromASCII("file0.txt", new string('A', 1_024))),
new("Directory_0", PackFile.CreateFromASCII("file1.txt", new string('B', 2_048))),
@@ -32,10 +32,8 @@ public void Setup()
[Test]
public void TestCompressAndDecompressPackFile()
{
- var compressionFormats = Enum.GetValues(typeof(CompressionFormat)).Cast<CompressionFormat>();
- var originals = _container.FileList
- .ToDictionary(file => file.Value.Name,
- file => file.Value.DataSource.ReadData());
+ var compressionFormats = Enum.GetValues<CompressionFormat>().Where(compressionFormat => compressionFormat != CompressionFormat.None);
+ var originals = _container.FileList.ToDictionary(file => file.Value.Name, file => file.Value.DataSource.ReadData());
foreach (var fileName in originals.Keys)
{
@@ -44,24 +42,15 @@ public void TestCompressAndDecompressPackFile()
foreach (var compressionFormat in compressionFormats)
{
var compressedData = PackFileCompression.Compress(data, compressionFormat);
+ var decompressedData = PackFileCompression.Decompress(compressedData, data.Length, compressionFormat);
+ Assert.That(decompressedData, Has.Length.EqualTo(data.Length), $"[{compressionFormat}] {fileName} length mismatch");
- if (compressionFormat != CompressionFormat.None)
- {
- Assert.That(compressedData, Has.Length.LessThan(data.Length),
- $"[{compressionFormat}] {fileName} did not reduce in size: {data.Length} --> {compressedData.Length}");
- }
-
- var decompressed = PackFileCompression.Decompress(compressedData);
- Assert.That(decompressed, Has.Length.EqualTo(data.Length),
- $"[{compressionFormat}] {fileName} length mismatch");
-
- var expected = Encoding.UTF8.GetString(originals[fileName]);
- var actual = Encoding.UTF8.GetString(decompressed);
- Assert.That(actual, Is.EqualTo(expected),
- $"[{compressionFormat}] {fileName} content mismatch after round-trip");
+ var expectedValue = Encoding.UTF8.GetString(originals[fileName]);
+ var actualValue = Encoding.UTF8.GetString(decompressedData);
+ Assert.That(actualValue, Is.EqualTo(expectedValue), $"[{compressionFormat}] {fileName} content mismatch after round-trip");
// Feed back in for the next iteration
- data = decompressed;
+ data = decompressedData;
}
}
}
diff --git a/Testing/Shared.Core.Test/Test.Shared.Core.csproj b/Testing/Shared.Core.Test/Test.Shared.Core.csproj
index 7d4127efa..0d9565e39 100644
--- a/Testing/Shared.Core.Test/Test.Shared.Core.csproj
+++ b/Testing/Shared.Core.Test/Test.Shared.Core.csproj
@@ -19,7 +19,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/Testing/Shared/Test.TestingUtility.csproj b/Testing/Shared/Test.TestingUtility.csproj
index de0bc933f..018909d97 100644
--- a/Testing/Shared/Test.TestingUtility.csproj
+++ b/Testing/Shared/Test.TestingUtility.csproj
@@ -20,7 +20,7 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+