-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy path: SfsFile.cs
234 lines (190 loc) · 8.51 KB
/
SfsFile.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
using System.Text.Json.Serialization;
using Ionic.Zlib;
using NLog;
namespace StarfieldSaveTool;
/// <summary>
/// Reads and writes Starfield ".sfs" save files: a "BCPS" header, a table of
/// compressed chunk sizes, then zlib-compressed data chunks each padded up to
/// a boundary given by the header's PaddingSize (observed: 16 bytes).
/// </summary>
public class SfsFile(FileInfo fileInfo)
{
    /// <summary>On-disk header of an SFS file. Scalar fields appear in file order.</summary>
    struct FileHeader
    {
        public int Version0 { get; set; }
        public long ChunkSizesOffset { get; set; }       // absolute offset of the chunk-size table
        public long Unknown0 { get; set; }
        public long CompressedDataOffset { get; set; }   // absolute offset of the first compressed chunk
        public long UncompressedDataSize { get; set; }   // total decompressed payload size
        public float Version1 { get; set; }
        public int Unknown1 { get; set; }
        public long SizeUncompressedChunks { get; set; } // uncompressed size per chunk (last chunk may be smaller)
        public long PaddingSize { get; set; }            // alignment boundary for stored chunks
        public int Unknown2 { get; set; }
        public char[] CompressionType { get; set; }      // e.g. "ZIP " for zlib
        public int ChunkCount { get; set; }              // derived from sizes above; not stored in the file
        public int[] CompressedChunkSizes { get; set; }        // sizes exactly as stored in the size table
        // Renamed from "CompressedChunkSizesWithoutPadding": it actually holds each
        // size rounded UP to the padding boundary (i.e. the on-disk footprint).
        public int[] PaddedCompressedChunkSizes { get; set; }
    }

    [JsonIgnore] public char[] Magic { get; private set; }
    FileHeader Header { get; set; }

    /// <summary>Concatenated decompressed payload; populated by <see cref="ProcessFile"/>.</summary>
    public byte[] DecompressedChunks { get; private set; } = [];

    private Stream _fileStream = fileInfo.OpenRead();
    private Logger _logger = LogManager.GetCurrentClassLogger();

    private const string SfsMagic = "BCPS";
    private const string PadString = "nexus\0"; // used when compressing to pad to 16 byte boundary

    /// <summary>
    /// Validates the magic bytes, reads the header, and decompresses every chunk
    /// into <see cref="DecompressedChunks"/>.
    /// </summary>
    /// <exception cref="InvalidDataException">The file does not start with the "BCPS" magic.</exception>
    public void ProcessFile()
    {
        using var br = new BinaryReader(_fileStream);
        br.BaseStream.Seek(0, SeekOrigin.Begin);

        // quick check for magic bytes
        Magic = br.ReadChars(4);
        if (new string(Magic) != SfsMagic)
        {
            _logger.Error("Invalid file format");
            // InvalidDataException is more specific than Exception; callers that
            // catch Exception still catch this.
            throw new InvalidDataException($"Not a valid Starfield save. Magic bytes ({SfsMagic}) not found.");
        }

        // read the header
        Header = ReadHeader(br);

        // read the compressed data blocks
        DecompressedChunks = DecompressChunks(br);
    }

    /// <summary>
    /// Compresses <paramref name="data"/> and writes a complete SFS file to
    /// <paramref name="path"/>, reusing the header read by <see cref="ProcessFile"/>
    /// but updated to describe the new payload.
    /// </summary>
    /// <param name="path">Destination file path; overwritten if it exists.</param>
    /// <param name="data">Uncompressed payload to store.</param>
    public void WriteFile(string path, byte[] data)
    {
        using var bw = new BinaryWriter(new FileStream(path, FileMode.Create, FileAccess.Write));
        bw.BaseStream.Seek(0, SeekOrigin.Begin);

        // get the compressed data chunks
        var chunks = CompressChunks(data, (int)Header.SizeUncompressedChunks);

        // Update the header so it describes the NEW payload. Previously
        // UncompressedDataSize/ChunkCount were left stale from the file that was
        // read, which produced a corrupt header whenever the payload size changed.
        var fileHeader = Header;
        fileHeader.UncompressedDataSize = data.LongLength;
        fileHeader.ChunkCount = chunks.Count;
        // start of compressed data blocks: ChunkSizesOffset + size of the chunk-size table (one int per chunk)
        fileHeader.CompressedDataOffset = fileHeader.ChunkSizesOffset + chunks.Count * 4L;
        Header = fileHeader;

        // write the header
        bw.Write(SfsMagic.ToCharArray()); // "BCPS"
        bw.Write(Header.Version0);
        bw.Write(Header.ChunkSizesOffset);
        bw.Write(Header.Unknown0);
        bw.Write(Header.CompressedDataOffset);
        bw.Write(Header.UncompressedDataSize);
        bw.Write(Header.Version1);
        bw.Write(Header.Unknown1);
        bw.Write(Header.SizeUncompressedChunks);
        bw.Write(Header.PaddingSize);
        bw.Write(Header.Unknown2);
        bw.Write(Header.CompressionType); // "ZIP "

        // write the compressed chunk sizes (unpadded, as the reader expects)
        foreach (var chunk in chunks)
        {
            bw.Write(chunk.Length);
        }

        // write the compressed chunks, each padded to the next boundary
        foreach (var chunk in chunks)
        {
            bw.Write(chunk);

            // bytes needed to reach the next PaddingSize boundary
            var padding = PadToNearestSize((int)Header.PaddingSize, chunk.Length) - chunk.Length;
            _logger.Debug($"We need to pad by {padding} bytes");

            bw.Write(GetPadBytesFromLoopingString(PadString, padding));
        }
    }

    /// <summary>Reads the fixed header fields plus the compressed-chunk-size table.</summary>
    private FileHeader ReadHeader(BinaryReader br)
    {
        var fileHeader = new FileHeader();

        fileHeader.Version0 = br.ReadInt32();
        fileHeader.ChunkSizesOffset = br.ReadInt64();
        fileHeader.Unknown0 = br.ReadInt64();
        fileHeader.CompressedDataOffset = br.ReadInt64();
        fileHeader.UncompressedDataSize = br.ReadInt64();
        fileHeader.Version1 = br.ReadSingle();
        fileHeader.Unknown1 = br.ReadInt32();
        fileHeader.SizeUncompressedChunks = br.ReadInt64();
        fileHeader.PaddingSize = br.ReadInt64();
        fileHeader.Unknown2 = br.ReadInt32();
        fileHeader.CompressionType = br.ReadChars(4);

        // Exact integer ceiling division. The previous (float) cast loses precision
        // for payloads beyond the 24-bit float mantissa (~16 MB) and could
        // miscount chunks.
        fileHeader.ChunkCount = (int)((fileHeader.UncompressedDataSize + fileHeader.SizeUncompressedChunks - 1)
                                      / fileHeader.SizeUncompressedChunks);

        fileHeader.CompressedChunkSizes = new int[fileHeader.ChunkCount];
        fileHeader.PaddedCompressedChunkSizes = new int[fileHeader.ChunkCount];

        // read the compressed chunk sizes
        for (var i = 0; i < fileHeader.ChunkCount; i++)
        {
            fileHeader.CompressedChunkSizes[i] = br.ReadInt32();
            // on-disk footprint: size rounded up to the padding boundary
            fileHeader.PaddedCompressedChunkSizes[i] = PadToNearestSize((int)fileHeader.PaddingSize, fileHeader.CompressedChunkSizes[i]);
        }

        return fileHeader;
    }

    /// <summary>
    /// Reads every chunk (including its trailing pad bytes, so the stream position
    /// lands on the next chunk), inflates it, and concatenates the results.
    /// </summary>
    byte[] DecompressChunks(BinaryReader br)
    {
        using MemoryStream decompressedDataStream = new MemoryStream();

        // go to start of compressed data blocks
        br.BaseStream.Seek(Header.CompressedDataOffset, SeekOrigin.Begin);

        for (var i = 0; i < Header.ChunkCount; i++)
        {
            // read the padded chunk; zlib stops at the end of the deflate stream,
            // so the trailing pad bytes are ignored by Decompress
            byte[] compressedData = br.ReadBytes(Header.PaddedCompressedChunkSizes[i]);

            byte[] decompressedData = Decompress(compressedData);

            decompressedDataStream.Write(decompressedData, 0, decompressedData.Length);
        }

        return decompressedDataStream.ToArray();
    }

    /// <summary>Splits <paramref name="data"/> into chunks of <paramref name="chunkSize"/> and compresses each.</summary>
    List<byte[]> CompressChunks(byte[] data, int chunkSize)
    {
        using MemoryStream dataStream = new MemoryStream(data);

        List<byte[]> chunks = new List<byte[]>();

        dataStream.Seek(0, SeekOrigin.Begin);

        byte[] buffer = new byte[chunkSize];
        int bytesRead;
        while ((bytesRead = dataStream.Read(buffer, 0, chunkSize)) > 0)
        {
            // slice out exactly what was read (last chunk may be short) and compress it
            byte[] compressedChunk = Compress(buffer[..bytesRead]);
            chunks.Add(compressedChunk);
        }

        return chunks;
    }

    /// <summary>Zlib-deflates <paramref name="data"/> into a new byte array.</summary>
    byte[] Compress(byte[] data)
    {
        using (MemoryStream inputStream = new MemoryStream(data))
        using (MemoryStream outputStream = new MemoryStream())
        using (ZlibStream deflateStream = new ZlibStream(outputStream, CompressionMode.Compress))
        {
            inputStream.CopyTo(deflateStream);
            deflateStream.Close(); // flush the final deflate block before reading outputStream
            return outputStream.ToArray();
        }
    }

    /// <summary>Zlib-inflates <paramref name="data"/> into a new byte array.</summary>
    byte[] Decompress(byte[] data)
    {
        using (MemoryStream inputStream = new MemoryStream(data))
        using (MemoryStream outputStream = new MemoryStream())
        using (ZlibStream deflateStream = new ZlibStream(inputStream, CompressionMode.Decompress))
        {
            deflateStream.CopyTo(outputStream);
            return outputStream.ToArray();
        }
    }

    // Rounds size up to the nearest multiple of paddingSize.
    // NOTE(review): the bitmask trick requires paddingSize to be a power of two —
    // holds for the observed value 16, but is read from the file; confirm.
    int PadToNearestSize(int paddingSize, int size)
    {
        int maxPadSize = paddingSize - 1;
        return (size + maxPadSize) & ~maxPadSize;
    }

    // Builds 'length' pad bytes by repeating padString, truncated to fit.
    byte[] GetPadBytesFromLoopingString(string padString, int length)
    {
        var paddingBytes = new byte[length];
        for (var i = 0; i < length; i++)
        {
            paddingBytes[i] = (byte)padString[i % padString.Length];
        }
        return paddingBytes;
    }
}