diff --git a/FinModelUtility/Fin/Fin.Compression Tests/goldens/LZSS_0x10/basabasa_fly/input/basabasa_fly.lzss b/FinModelUtility/Fin/Fin.Compression Tests/goldens/LZSS_0x10/basabasa_fly/input/basabasa_fly.lzss
new file mode 100644
index 000000000..4cb74bf20
Binary files /dev/null and b/FinModelUtility/Fin/Fin.Compression Tests/goldens/LZSS_0x10/basabasa_fly/input/basabasa_fly.lzss differ
diff --git a/FinModelUtility/Fin/Fin/src/decompression/ArrayInterfaces.cs b/FinModelUtility/Fin/Fin/src/decompression/ArrayToArrayInterfaces.cs
similarity index 75%
rename from FinModelUtility/Fin/Fin/src/decompression/ArrayInterfaces.cs
rename to FinModelUtility/Fin/Fin/src/decompression/ArrayToArrayInterfaces.cs
index 5e0a01db1..1bd590f5a 100644
--- a/FinModelUtility/Fin/Fin/src/decompression/ArrayInterfaces.cs
+++ b/FinModelUtility/Fin/Fin/src/decompression/ArrayToArrayInterfaces.cs
@@ -2,13 +2,13 @@
namespace fin.decompression;
-public interface IArrayDecompressor {
+public interface IArrayToArrayDecompressor {
bool TryDecompress(byte[] src, out byte[] dst);
byte[] Decompress(byte[] src);
}
-public abstract class BArrayDecompressor : IArrayDecompressor {
+public abstract class BArrayToArrayDecompressor : IArrayToArrayDecompressor {
public abstract bool TryDecompress(byte[] src, out byte[] dst);
public byte[] Decompress(byte[] src) {
diff --git a/FinModelUtility/Fin/Fin/src/decompression/BinaryReaderToArrayInterfaces.cs b/FinModelUtility/Fin/Fin/src/decompression/BinaryReaderToArrayInterfaces.cs
new file mode 100644
index 000000000..b7af5e1bd
--- /dev/null
+++ b/FinModelUtility/Fin/Fin/src/decompression/BinaryReaderToArrayInterfaces.cs
@@ -0,0 +1,23 @@
+using System;
+
+using schema.binary;
+
+namespace fin.decompression;
+
+public interface IBinaryReaderToArrayDecompressor {
+ bool TryDecompress(IBinaryReader br, out byte[] dst);
+ byte[] Decompress(IBinaryReader br);
+}
+
+public abstract class BBinaryReaderToArrayDecompressor
+ : IBinaryReaderToArrayDecompressor {
+ public abstract bool TryDecompress(IBinaryReader br, out byte[] dst);
+
+ public byte[] Decompress(IBinaryReader br) {
+ if (this.TryDecompress(br, out byte[] dst)) {
+ return dst;
+ }
+
+ throw new Exception("Failed to decompress bytes.");
+ }
+}
\ No newline at end of file
diff --git a/FinModelUtility/Fin/Fin/src/decompression/Lz77Decompressor.cs b/FinModelUtility/Fin/Fin/src/decompression/Lz77Decompressor.cs
new file mode 100644
index 000000000..0837dcc38
--- /dev/null
+++ b/FinModelUtility/Fin/Fin/src/decompression/Lz77Decompressor.cs
@@ -0,0 +1,179 @@
+using System;
+using System.Collections.Generic;
+using System.Runtime.ConstrainedExecution;
+using System.Text.RegularExpressions;
+
+using fin.math;
+using fin.schema;
+using fin.util.asserts;
+using fin.util.strings;
+
+using schema.binary;
+
+namespace fin.decompression;
+
+///
+/// Shamelessly stolen from:
+/// https://github.com/scurest/apicula/blob/3d4e91e14045392a49c89e86dab8cb936225588c/src/decompress/mod.rs
+///
+public class Lz77Decompressor : BBinaryReaderToArrayDecompressor {
+ public override bool TryDecompress(IBinaryReader br, out byte[] data) {
+ br.AssertString("LZ77");
+
+ var compressionType = br.ReadByte();
+ switch (compressionType) {
+ case 0x10: {
+ data = Decompress10_(br);
+ return true;
+ }
+ case 0x11: {
+ data = Decompress11_(br);
+ return true;
+ }
+ }
+
+ data = default;
+ return false;
+ }
+
+ private static byte[] Decompress10_(IBinaryReader br) {
+ var decompressedSize = ReadDecompressedSize_(br);
+    var data = new List<byte>((int) decompressedSize);
+
+ while (data.Count < decompressedSize) {
+ var flags = br.ReadByte();
+
+ for (var i = 0; i < 8; ++i) {
+ var compressed = (flags & 0x80) != 0;
+ flags <<= 1;
+
+ if (!compressed) {
+ // Uncompressed byte
+ data.Add(br.ReadByte());
+ } else {
+ // LZ backreference
+ var ofsSub1And3 = br.ReadUInt16();
+ var ofsSub1 = ofsSub1And3.ExtractFromRight(0, 12);
+ var ofsSub3 = ofsSub1And3.ExtractFromRight(12, 4);
+
+ var ofs = ofsSub1 + 1;
+ var n = ofsSub3 + 3;
+
+ if (data.Count + n > decompressedSize) {
+ Asserts.Fail("Too much data!");
+ }
+
+ if (data.Count < ofs) {
+ Asserts.Fail("Not enough data!");
+ }
+
+          for (var ii = 0; ii < n; ++ii) {
+ var x = data[data.Count - ofs];
+ data.Add(x);
+ }
+ }
+
+ if (data.Count >= decompressedSize) {
+ break;
+ }
+ }
+ }
+
+ return data.ToArray();
+ }
+
+ private static byte[] Decompress11_(IBinaryReader br) {
+ var decompressedSize = ReadDecompressedSize_(br);
+    var data = new List<byte>((int) decompressedSize);
+
+ while (data.Count < decompressedSize) {
+ var flags = br.ReadByte();
+ for (var i = 0; i < 8; ++i) {
+ var compressed = (flags & 0x80) != 0;
+ flags <<= 1;
+
+ if (!compressed) {
+ // Uncompressed byte
+ data.Add(br.ReadByte());
+ } else {
+ br.ReadByte().SplitNibbles(out var a, out var b);
+ var cd = br.ReadByte();
+
+ int n, ofs;
+ switch (a) {
+ case 0: {
+ // ab cd ef
+ // =>
+ // n = abc + 0x11 = bc + 0x11
+ // ofs = def + 1
+ cd.SplitNibbles(out var c, out var d);
+ var ef = br.ReadByte();
+
+ n = ((b << 4) | c) + 0x11;
+ ofs = ((d << 8) | ef) + 1;
+ break;
+ }
+ case 1: {
+ // ab cd ef gh
+ // =>
+ // n = bcde + 0x111
+ // ofs = fgh + 1
+ br.ReadByte().SplitNibbles(out var e, out var f);
+ var gh = br.ReadByte();
+
+ n = ((b << 12) | (cd << 4) | e) +
+ 0x111;
+ ofs = ((f << 8) | gh) + 1;
+ break;
+ }
+ default: {
+ // ab cd
+ // =>
+ // n = a + 1
+ // ofs = bcd + 1
+ n = a + 1;
+ ofs = ((b << 8) | cd) + 1;
+ break;
+ }
+ }
+
+ if (data.Count + n > decompressedSize) {
+ Asserts.Fail("Too much data!");
+ }
+
+ if (data.Count < ofs) {
+ Asserts.Fail("Not enough data!");
+ }
+
+ for (var ii = 0; ii < n; ii++) {
+ var x = data[data.Count - ofs];
+ data.Add(x);
+ }
+ }
+
+ if (data.Count >= decompressedSize) {
+ break;
+ }
+ }
+ }
+
+ return data.ToArray();
+ }
+
+ private static uint ReadDecompressedSize_(IBinaryReader br) {
+ var decompressedSize = br.ReadUInt24();
+ if (decompressedSize == 0) {
+ decompressedSize = br.ReadUInt32();
+ }
+
+ if (decompressedSize < 40) {
+ Asserts.Fail($"LZ77 decompressed size is too small: {decompressedSize}");
+ }
+
+ if (decompressedSize > (1 << 19) * 4) {
+ Asserts.Fail($"LZ77 decompressed size is too big: {decompressedSize}");
+ }
+
+ return decompressedSize;
+ }
+}
\ No newline at end of file
diff --git a/FinModelUtility/Fin/Fin/src/math/BitLogic.cs b/FinModelUtility/Fin/Fin/src/math/BitLogic.cs
index 85a5639a6..7f6b02e26 100644
--- a/FinModelUtility/Fin/Fin/src/math/BitLogic.cs
+++ b/FinModelUtility/Fin/Fin/src/math/BitLogic.cs
@@ -6,6 +6,13 @@ namespace fin.math;
public static class BitLogic {
private static readonly byte[] TEMP_ = new byte[4];
+ public static void SplitNibbles(this byte value,
+ out byte high,
+ out byte low) {
+ high = (byte) (value >> 4);
+ low = (byte) (value & 0xF);
+ }
+
public static uint ToUint32(byte a, byte b, byte c, byte d) {
TEMP_[0] = a;
TEMP_[1] = b;
diff --git a/FinModelUtility/Formats/F3dzex2/io/IReadOnlyN64Memory.cs b/FinModelUtility/Formats/F3dzex2/io/IReadOnlyN64Memory.cs
index 8d5848fb6..ecb4e4355 100644
--- a/FinModelUtility/Formats/F3dzex2/io/IReadOnlyN64Memory.cs
+++ b/FinModelUtility/Formats/F3dzex2/io/IReadOnlyN64Memory.cs
@@ -38,7 +38,7 @@ public interface IN64Memory : IReadOnlyN64Memory {
void AddSegment(uint segmentIndex,
uint offset,
uint length,
- IArrayDecompressor? decompressor = null);
+ IArrayToArrayDecompressor? decompressor = null);
void AddSegment(uint segmentIndex, Segment segment);
}
@@ -128,7 +128,7 @@ public bool IsSegmentCompressed(uint segmentIndex)
public void AddSegment(uint segmentIndex,
uint offset,
uint length,
- IArrayDecompressor? decompressor = null)
+ IArrayToArrayDecompressor? decompressor = null)
=> this.AddSegment(segmentIndex,
new Segment {
Offset = offset,
@@ -162,5 +162,5 @@ private bool TryToGetSegmentsAtSegmentedAddress_(
public readonly struct Segment {
public required uint Offset { get; init; }
public required uint Length { get; init; }
- public IArrayDecompressor? Decompressor { get; init; }
+ public IArrayToArrayDecompressor? Decompressor { get; init; }
}
\ No newline at end of file
diff --git a/FinModelUtility/Formats/Level5/Level5/src/decompression/Level5Decompressor.cs b/FinModelUtility/Formats/Level5/Level5/src/decompression/Level5Decompressor.cs
index 460b32bd5..69878ce43 100644
--- a/FinModelUtility/Formats/Level5/Level5/src/decompression/Level5Decompressor.cs
+++ b/FinModelUtility/Formats/Level5/Level5/src/decompression/Level5Decompressor.cs
@@ -2,7 +2,7 @@
namespace level5.decompression;
-public class Level5Decompressor : BArrayDecompressor {
+public class Level5Decompressor : BArrayToArrayDecompressor {
public override bool TryDecompress(byte[] src, out byte[] dst) {
int tableType = (src[0] & 0xFF);
@@ -10,7 +10,7 @@ public override bool TryDecompress(byte[] src, out byte[] dst) {
out _,
out var decompressionType);
- if (new ZlibArrayDecompressor().TryDecompress(src, out dst)) {
+ if (new ZlibArrayToArrayDecompressor().TryDecompress(src, out dst)) {
return true;
}
diff --git a/FinModelUtility/Formats/Level5/Level5/src/decompression/ZlibDecompressor.cs b/FinModelUtility/Formats/Level5/Level5/src/decompression/ZlibDecompressor.cs
index bb1d1d842..5cedd36ed 100644
--- a/FinModelUtility/Formats/Level5/Level5/src/decompression/ZlibDecompressor.cs
+++ b/FinModelUtility/Formats/Level5/Level5/src/decompression/ZlibDecompressor.cs
@@ -4,7 +4,7 @@
namespace level5.decompression;
-public class ZlibArrayDecompressor : BArrayDecompressor {
+public class ZlibArrayToArrayDecompressor : BArrayToArrayDecompressor {
public override bool TryDecompress(byte[] src, out byte[] dst) {
var b = src;
if (b.Length < 6) {
diff --git a/FinModelUtility/Formats/Level5/Level5/src/schema/Xc.cs b/FinModelUtility/Formats/Level5/Level5/src/schema/Xc.cs
index 377657066..2b617a027 100644
--- a/FinModelUtility/Formats/Level5/Level5/src/schema/Xc.cs
+++ b/FinModelUtility/Formats/Level5/Level5/src/schema/Xc.cs
@@ -44,7 +44,7 @@ public void Read(IBinaryReader br) {
}
var inNameTable = br.SubreadAt(fileTableOffset, () => br.ReadBytes(filenameTableSize));
- if (!new ZlibArrayDecompressor().TryDecompress(inNameTable, out var nameTable)) {
+ if (!new ZlibArrayToArrayDecompressor().TryDecompress(inNameTable, out var nameTable)) {
nameTable = new LzssDecompressor().Decompress(inNameTable);
}
diff --git a/FinModelUtility/Formats/Visceral/Visceral/src/api/StrExtractor.cs b/FinModelUtility/Formats/Visceral/Visceral/src/api/StrExtractor.cs
index 5f7b1e351..07019a501 100644
--- a/FinModelUtility/Formats/Visceral/Visceral/src/api/StrExtractor.cs
+++ b/FinModelUtility/Formats/Visceral/Visceral/src/api/StrExtractor.cs
@@ -63,7 +63,7 @@ public async Task ExtractAsync(IReadOnlyGenericFile strFile,
}
}
- var refPackDecompressor = new RefPackArrayDecompressor();
+ var refPackDecompressor = new RefPackArrayToArrayDecompressor();
await Parallel.ForEachAsync(
headerBlocks,
new ParallelOptions { MaxDegreeOfParallelism = -1, },
diff --git a/FinModelUtility/Formats/Visceral/Visceral/src/decompression/RefPackDecompressor.cs b/FinModelUtility/Formats/Visceral/Visceral/src/decompression/RefPackDecompressor.cs
index 4d076f684..f16288f4a 100644
--- a/FinModelUtility/Formats/Visceral/Visceral/src/decompression/RefPackDecompressor.cs
+++ b/FinModelUtility/Formats/Visceral/Visceral/src/decompression/RefPackDecompressor.cs
@@ -24,7 +24,7 @@
namespace visceral.decompression;
-public class RefPackArrayDecompressor : BArrayDecompressor {
+public class RefPackArrayToArrayDecompressor : BArrayToArrayDecompressor {
public override bool TryDecompress(byte[] inData, out byte[] outData) {
using var input = new MemoryStream(inData);
    Span<byte> dummy = stackalloc byte[4];
diff --git a/FinModelUtility/Games/SuperMario64/SuperMario64/src/memory/Sm64Memory.cs b/FinModelUtility/Games/SuperMario64/SuperMario64/src/memory/Sm64Memory.cs
index cb35bd3f6..2a8beb8c5 100644
--- a/FinModelUtility/Games/SuperMario64/SuperMario64/src/memory/Sm64Memory.cs
+++ b/FinModelUtility/Games/SuperMario64/SuperMario64/src/memory/Sm64Memory.cs
@@ -71,7 +71,7 @@ public bool IsSegmentCompressed(uint segmentIndex) {
public void AddSegment(uint segmentIndex,
uint offset,
uint length,
- IArrayDecompressor? decompressor = null) {
+ IArrayToArrayDecompressor? decompressor = null) {
throw new NotImplementedException();
}