diff --git a/README.md b/README.md index 058ccdf8..1477ddaf 100644 --- a/README.md +++ b/README.md @@ -81,8 +81,15 @@ might not work with them. #### From a standalone file ```cs -// Read file +// Read file with default episode (EP5) and encoding (ASCII) var svmap = Reader.ReadFromFile("0.svmap"); + +// Read file with a specific episode and default encoding +var svmap = Reader.ReadFromFile("0.svmap", Episode.EP6); + +// Read file with a specific episode and a specific encoding +var windows1252Encoding = CodePagesEncodingProvider.Instance.GetEncoding(1252); +var svmap = Reader.ReadFromFile("0.svmap", Episode.EP6, windows1252Encoding); ``` #### From data.saf @@ -122,14 +129,28 @@ All of the Episode 8 `BinarySData` formats have `CSV` support. var item = Item.ReadFromCsv("Item.csv"); ``` +### Encoding + +When reading files, the default encoding is `ASCII`. If you want to read a file with a different encoding, you can +specify it as a parameter when calling the `ReadFromFile`, `ReadFromJson` and `ReadFromCsv` methods. + ### Writing #### Writing a standalone file -After modifying the file, you can save it in its original format by calling the `Write` method +After modifying the file, you can save it in its original format by calling the `Write` method. If you specified +the episode and encoding when reading the file, you don't need to specify them again when writing it. ```cs +// Write file with the previously defined episode and encoding (set when reading the file) svmap.Write("0_modified.svmap"); + +// Write file with a specific episode and default encoding +svmap.Write("0_modified.svmap", Episode.EP6); + +// Write file with a specific episode and a specific encoding +var windows1252Encoding = CodePagesEncodingProvider.Instance.GetEncoding(1252); +svmap.Write("0_modified.svmap", Episode.EP6, windows1252Encoding); ``` #### Writing as JSON @@ -150,6 +171,16 @@ All of the Episode 8 `BinarySData` formats have `CSV` support. item.WriteCsv("Item.csv") ``` +### Encoding + +When writing files, the default encoding is `ASCII`. If you want to write a file with a different encoding, you can +specify it as a parameter when calling the `Write`, `WriteJson` and `WriteCsv` methods.
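+ +```cs +// Write the csv file with a specific encoding instead of the default ASCII +var windows1252Encoding = CodePagesEncodingProvider.Instance.GetEncoding(1252); +item.WriteCsv("Item.csv", windows1252Encoding); +```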
## Samples diff --git a/src/Parsec/Attributes/FixedLengthStringAttribute.cs b/src/Parsec/Attributes/FixedLengthStringAttribute.cs index f8a3d2ea..36f8ed29 100644 --- a/src/Parsec/Attributes/FixedLengthStringAttribute.cs +++ b/src/Parsec/Attributes/FixedLengthStringAttribute.cs @@ -1,12 +1,9 @@ -using System.Text; - -namespace Parsec.Attributes; +namespace Parsec.Attributes; [AttributeUsage(AttributeTargets.Property)] public class FixedLengthStringAttribute : Attribute { public int Length { get; set; } - public Encoding Encoding { get; set; } = Encoding.ASCII; public bool IncludeStringTerminator { get; set; } public bool IsString256 { get; set; } public string Suffix { get; set; } @@ -21,9 +18,8 @@ public FixedLengthStringAttribute(bool isString256) IsString256 = isString256; } - public FixedLengthStringAttribute(Encoding encoding, int length, bool removeStringTerminator) + public FixedLengthStringAttribute(int length, bool removeStringTerminator) { - Encoding = encoding; Length = length; IncludeStringTerminator = removeStringTerminator; } diff --git a/src/Parsec/Attributes/LengthPrefixedStringAttribute.cs b/src/Parsec/Attributes/LengthPrefixedStringAttribute.cs index bd09785f..dea71831 100644 --- a/src/Parsec/Attributes/LengthPrefixedStringAttribute.cs +++ b/src/Parsec/Attributes/LengthPrefixedStringAttribute.cs @@ -1,11 +1,8 @@ -using System.Text; - -namespace Parsec.Attributes; +namespace Parsec.Attributes; [AttributeUsage(AttributeTargets.Property)] public class LengthPrefixedStringAttribute : Attribute { - public Encoding Encoding { get; set; } = Encoding.ASCII; public bool IncludeStringTerminator { get; set; } public string Suffix { get; set; } @@ -14,19 +11,8 @@ public LengthPrefixedStringAttribute() { } - public LengthPrefixedStringAttribute(Encoding encoding) - { - Encoding = encoding; - } - public LengthPrefixedStringAttribute(bool includeStringTerminator) { IncludeStringTerminator = includeStringTerminator; } - - public LengthPrefixedStringAttribute(Encoding encoding, bool includeStringTerminator) - { - Encoding = encoding; - IncludeStringTerminator = includeStringTerminator; - } } diff --git a/src/Parsec/Common/ICsv.cs b/src/Parsec/Common/ICsv.cs index be2ec2f0..76af215b 100644 --- a/src/Parsec/Common/ICsv.cs +++ b/src/Parsec/Common/ICsv.cs @@ -1,4 +1,6 @@ -namespace Parsec.Common; +using System.Text; + +namespace Parsec.Common; public interface ICsv { @@ -6,5 +8,6 @@ public interface ICsv /// Exports the file in csv format /// /// Export file path - void WriteCsv(string outputPath); + /// File encoding + void WriteCsv(string outputPath, Encoding encoding = null); } diff --git a/src/Parsec/Extensions/BinaryExtensions.cs b/src/Parsec/Extensions/BinaryExtensions.cs index 0729fcb7..8a422058 100644 --- a/src/Parsec/Extensions/BinaryExtensions.cs +++ b/src/Parsec/Extensions/BinaryExtensions.cs @@ -53,10 +53,8 @@ public static IEnumerable GetLengthPrefixedBytes(this string str, Encoding var buffer = new List(); string finalStr = includeStringTerminator ? 
str + '\0' : str; - buffer.AddRange(finalStr.Length.GetBytes()); buffer.AddRange(finalStr.GetBytes(encoding)); - return buffer; } diff --git a/src/Parsec/Helpers/FileHelper.cs b/src/Parsec/Helpers/FileHelper.cs index 8a8c9f2f..4f327bcc 100644 --- a/src/Parsec/Helpers/FileHelper.cs +++ b/src/Parsec/Helpers/FileHelper.cs @@ -1,6 +1,4 @@ -using System.IO; -using System.Linq; -using Parsec.Readers; +using System.Text; namespace Parsec.Helpers; @@ -17,9 +15,12 @@ public static class FileHelper /// /// File path /// The file content + /// File encoding /// Makes a backup of the file if it already existed - public static bool WriteFile(string path, string content, bool backupIfExists = false) + public static bool WriteFile(string path, string content, Encoding encoding = null, bool backupIfExists = false) { + encoding ??= Encoding.ASCII; + if (backupIfExists && FileExists(path)) { DeleteFile($"{path}.bak"); @@ -36,7 +37,7 @@ public static bool WriteFile(string path, string content, bool backupIfExists = try { - File.WriteAllText(path, content); + File.WriteAllText(path, content, encoding); return true; } catch diff --git a/src/Parsec/Parsec.csproj b/src/Parsec/Parsec.csproj index 0982bc7c..f9447159 100644 --- a/src/Parsec/Parsec.csproj +++ b/src/Parsec/Parsec.csproj @@ -18,10 +18,10 @@ - + - - + + diff --git a/src/Parsec/Readers/Reader.cs b/src/Parsec/Readers/Reader.cs index e8f6d33b..1348adfe 100644 --- a/src/Parsec/Readers/Reader.cs +++ b/src/Parsec/Readers/Reader.cs @@ -15,19 +15,22 @@ public static class Reader /// /// File path /// File episode + /// File encoding /// Shaiya File Format Type /// T instance - public static T ReadFromFile(string path, Episode episode = Episode.EP5) where T : FileBase, new() => - FileBase.ReadFromFile(path, episode); + public static T ReadFromFile(string path, Episode episode = Episode.EP5, Encoding encoding = null) where T : FileBase, new() => + FileBase.ReadFromFile(path, episode, encoding); /// /// Reads a shaiya file format from a file /// /// File path /// File episode + /// File encoding /// Shaiya File Format Type - public static Task ReadFromFileAsync(string path, Episode episode = Episode.EP5) where T : FileBase, new() => - Task.FromResult(ReadFromFile(path, episode)); + public static Task ReadFromFileAsync(string path, Episode episode = Episode.EP5, Encoding encoding = null) + where T : FileBase, new() => + Task.FromResult(ReadFromFile(path, episode, encoding)); /// /// Reads the shaiya file format from a file @@ -35,9 +38,10 @@ public static class Reader /// File path /// FileBase child type to be read /// File episode + /// File encoding /// FileBase instance - public static FileBase ReadFromFile(string path, Type type, Episode episode = Episode.EP5) => - FileBase.ReadFromFile(path, type, episode); + public static FileBase ReadFromFile(string path, Type type, Episode episode = Episode.EP5, Encoding encoding = null) => + FileBase.ReadFromFile(path, type, episode, encoding); /// /// Reads the shaiya file format from a file @@ -45,8 +49,9 @@ public static FileBase ReadFromFile(string path, Type type, Episode episode = Ep /// File path /// FileBase child type to be read /// File episode - public static Task ReadFromFileAsync(string path, Type type, Episode episode = Episode.EP5) => - Task.FromResult(ReadFromFile(path, type, episode)); + /// File encoding + public static Task ReadFromFileAsync(string path, Type type, Episode episode = Episode.EP5, Encoding encoding = null) => + Task.FromResult(ReadFromFile(path, type, episode, encoding)); /// /// Reads a 
shaiya file format from a buffer (byte array) @@ -54,10 +59,12 @@ public static Task ReadFromFileAsync(string path, Type type, Episode e /// File name /// File buffer /// File episode + /// File encoding /// Shaiya File Format Type /// T instance - public static T ReadFromBuffer(string name, byte[] buffer, Episode episode = Episode.EP5) where T : FileBase, new() => - FileBase.ReadFromBuffer(name, buffer, episode); + public static T ReadFromBuffer(string name, byte[] buffer, Episode episode = Episode.EP5, Encoding encoding = null) + where T : FileBase, new() => + FileBase.ReadFromBuffer(name, buffer, episode, encoding); /// @@ -66,8 +73,10 @@ public static Task ReadFromFileAsync(string path, Type type, Episode e /// File name /// File buffer /// File episode + /// File encoding /// Shaiya File Format Type - public static Task ReadFromBufferAsync(string name, byte[] buffer, Episode episode = Episode.EP5) where T : FileBase, new() => + public static Task ReadFromBufferAsync(string name, byte[] buffer, Episode episode = Episode.EP5, Encoding encoding = null) + where T : FileBase, new() => Task.FromResult(ReadFromBuffer(name, buffer, episode)); /// @@ -77,9 +86,10 @@ public static Task ReadFromFileAsync(string path, Type type, Episode e /// File buffer /// FileBase child type to be read /// File episode + /// File encoding /// FileBase instance - public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Episode episode = Episode.EP5) => - FileBase.ReadFromBuffer(name, buffer, type); + public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Episode episode = Episode.EP5, Encoding encoding = null) => + FileBase.ReadFromBuffer(name, buffer, type, episode, encoding); /// /// Reads the shaiya file format from a buffer (byte array) @@ -88,8 +98,10 @@ public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Epi /// File buffer /// FileBase child type to be read /// File episode - public static Task ReadFromBufferAsync(string name, byte[] buffer, Type type, Episode episode = Episode.EP5) => - Task.FromResult(ReadFromBuffer(name, buffer, type)); + /// File encoding + public static Task ReadFromBufferAsync(string name, byte[] buffer, Type type, Episode episode = Episode.EP5, + Encoding encoding = null) => + Task.FromResult(ReadFromBuffer(name, buffer, type, episode, encoding)); /// /// Reads a shaiya file format from a json file @@ -128,14 +140,11 @@ public static FileBase ReadFromJsonFile(string path, Type type, Encoding encodin if (Path.GetExtension(path) != ".json") throw new FileLoadException("The provided file to deserialize must be a valid json file"); - // Set default encoding encoding ??= Encoding.ASCII; - // Read json file content string jsonContent = File.ReadAllText(path, encoding); - // Deserialize into FileBase var deserializedObject = (FileBase)JsonConvert.DeserializeObject(jsonContent, type); - // Get file name without the ".json" extension + string fileNameWithoutJsonExtension = Path.GetFileNameWithoutExtension(path); if (deserializedObject == null) @@ -192,10 +201,8 @@ public static FileBase ReadFromJson(string name, string jsonText, Type type, Enc if (!type.GetBaseClassesAndInterfaces().Contains(typeof(FileBase))) throw new ArgumentException("Type must be a child of FileBase"); - // Set default encoding encoding ??= Encoding.ASCII; - // Deserialize into FileBase var deserializedObject = (FileBase)JsonConvert.DeserializeObject(jsonText, type); if (deserializedObject == null) return null; @@ -221,9 +228,11 @@ public static Task 
ReadFromJsonAsync(string name, string jsonText, Typ /// instance /// instance /// File episode + /// File encoding /// FileBase instance - public static T ReadFromData(Data data, SFile file, Episode episode = Episode.EP5) where T : FileBase, new() => - FileBase.ReadFromData(data, file, episode); + public static T ReadFromData(Data data, SFile file, Episode episode = Episode.EP5, Encoding encoding = null) + where T : FileBase, new() => + FileBase.ReadFromData(data, file, episode, encoding); /// /// Reads the shaiya file format from a buffer (byte array) within a instance @@ -231,8 +240,10 @@ public static Task ReadFromJsonAsync(string name, string jsonText, Typ /// instance /// instance /// File episode - public static Task ReadFromDataAsync(Data data, SFile file, Episode episode = Episode.EP5) where T : FileBase, new() => - Task.FromResult(ReadFromData(data, file, episode)); + /// File encoding + public static Task ReadFromDataAsync(Data data, SFile file, Episode episode = Episode.EP5, Encoding encoding = null) + where T : FileBase, new() => + Task.FromResult(ReadFromData(data, file, episode, encoding)); /// /// Reads the shaiya file format from a buffer (byte array) within a instance @@ -241,9 +252,10 @@ public static Task ReadFromJsonAsync(string name, string jsonText, Typ /// instance /// FileBase child type to be read /// File episode + /// File encoding /// FileBase instance - public static FileBase ReadFromData(Data data, SFile file, Type type, Episode episode = Episode.EP5) => - FileBase.ReadFromData(data, file, type, episode); + public static FileBase ReadFromData(Data data, SFile file, Type type, Episode episode = Episode.EP5, Encoding encoding = null) => + FileBase.ReadFromData(data, file, type, episode, encoding); /// /// Reads the shaiya file format from a buffer (byte array) within a instance @@ -252,6 +264,8 @@ public static FileBase ReadFromData(Data data, SFile file, Type type, Episode ep /// instance /// FileBase child type to be read /// File episode - public static Task ReadFromDataAsync(Data data, SFile file, Type type, Episode episode = Episode.EP5) => - Task.FromResult(ReadFromData(data, file, type, episode)); + /// File encoding + public static Task ReadFromDataAsync(Data data, SFile file, Type type, Episode episode = Episode.EP5, + Encoding encoding = null) => + Task.FromResult(ReadFromData(data, file, type, episode, encoding)); } diff --git a/src/Parsec/Shaiya/Core/FileBase.cs b/src/Parsec/Shaiya/Core/FileBase.cs index b063801f..c3003c5b 100644 --- a/src/Parsec/Shaiya/Core/FileBase.cs +++ b/src/Parsec/Shaiya/Core/FileBase.cs @@ -45,7 +45,7 @@ public abstract class FileBase : IFileBase, IExportable /// public void WriteJson(string path, params string[] ignoredPropertyNames) => - FileHelper.WriteFile(path, JsonSerialize(this, ignoredPropertyNames)); + FileHelper.WriteFile(path, JsonSerialize(this, ignoredPropertyNames), Encoding); /// public virtual string JsonSerialize(FileBase obj, params string[] ignoredPropertyNames) @@ -174,7 +174,7 @@ public virtual void Read() if (!property.IsDefined(typeof(ShaiyaPropertyAttribute))) continue; - object value = ReflectionHelper.ReadProperty(_binaryReader, type, this, property, Episode); + object value = ReflectionHelper.ReadProperty(_binaryReader, type, this, property, Episode, Encoding); property.SetValue(this, Convert.ChangeType(value, property.PropertyType)); // Set episode based on property @@ -197,11 +197,15 @@ public virtual void Read() /// /// File path /// File episode + /// File encoding /// Shaiya File Format Type /// 
T instance - public static T ReadFromFile(string path, Episode episode = Episode.EP5) where T : FileBase, new() + public static T ReadFromFile(string path, Episode episode = Episode.EP5, Encoding encoding = null) where T : FileBase, new() { - var instance = new T { Path = path, _binaryReader = new SBinaryReader(path), Episode = episode }; + var instance = new T + { + Path = path, _binaryReader = new SBinaryReader(path), Episode = episode, Encoding = encoding ?? Encoding.ASCII + }; if (instance is IEncryptable encryptableInstance) encryptableInstance.DecryptBuffer(); @@ -218,8 +222,9 @@ public virtual void Read() /// File path /// FileBase child type to be read /// File episode + /// File encoding /// FileBase instance - public static FileBase ReadFromFile(string path, Type type, Episode episode = Episode.EP5) + public static FileBase ReadFromFile(string path, Type type, Episode episode = Episode.EP5, Encoding encoding = null) { if (!type.GetBaseClassesAndInterfaces().Contains(typeof(FileBase))) throw new ArgumentException("Type must be a child of FileBase"); @@ -229,6 +234,7 @@ public static FileBase ReadFromFile(string path, Type type, Episode episode = Ep instance.Path = path; instance._binaryReader = binaryReader; instance.Episode = episode; + instance.Encoding = encoding ?? Encoding.ASCII; if (instance is IEncryptable encryptableInstance) encryptableInstance.DecryptBuffer(); @@ -245,11 +251,16 @@ public static FileBase ReadFromFile(string path, Type type, Episode episode = Ep /// File name /// File buffer /// File episode + /// File encoding /// Shaiya File Format Type /// T instance - public static T ReadFromBuffer(string name, byte[] buffer, Episode episode = Episode.EP5) where T : FileBase, new() + public static T ReadFromBuffer(string name, byte[] buffer, Episode episode = Episode.EP5, Encoding encoding = null) + where T : FileBase, new() { - var instance = new T { Path = name, _binaryReader = new SBinaryReader(buffer), Episode = episode }; + var instance = new T + { + Path = name, _binaryReader = new SBinaryReader(buffer), Episode = episode, Encoding = encoding ?? Encoding.ASCII + }; if (instance is IEncryptable encryptableInstance) encryptableInstance.DecryptBuffer(); @@ -267,8 +278,9 @@ public static FileBase ReadFromFile(string path, Type type, Episode episode = Ep /// File buffer /// FileBase child type to be read /// File episode + /// File encoding /// FileBase instance - public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Episode episode = Episode.EP5) + public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Episode episode = Episode.EP5, Encoding encoding = null) { if (!type.GetBaseClassesAndInterfaces().Contains(typeof(FileBase))) throw new ArgumentException("Type must be a child of FileBase"); @@ -277,6 +289,7 @@ public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Epi instance.Path = name; instance._binaryReader = new SBinaryReader(buffer); instance.Episode = episode; + instance.Encoding = encoding ?? 
Encoding.ASCII; if (instance is IEncryptable encryptableInstance) encryptableInstance.DecryptBuffer(); @@ -293,9 +306,11 @@ public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Epi /// instance /// instance /// File episode + /// File encoding /// FileBase instance - public static T ReadFromData(Data.Data data, SFile file, Episode episode = Episode.EP5) where T : FileBase, new() => - ReadFromBuffer(file.Name, data.GetFileBuffer(file), episode); + public static T ReadFromData(Data.Data data, SFile file, Episode episode = Episode.EP5, Encoding encoding = null) + where T : FileBase, new() => + ReadFromBuffer(file.Name, data.GetFileBuffer(file), episode, encoding); /// /// Reads the shaiya file format from a buffer (byte array) within a instance @@ -304,12 +319,13 @@ public static FileBase ReadFromBuffer(string name, byte[] buffer, Type type, Epi /// instance /// FileBase child type to be read /// File episode + /// File encoding /// FileBase instance - public static FileBase ReadFromData(Data.Data data, SFile file, Type type, Episode episode = Episode.EP5) + public static FileBase ReadFromData(Data.Data data, SFile file, Type type, Episode episode = Episode.EP5, Encoding encoding = null) { if (!data.FileIndex.ContainsValue(file)) throw new FileNotFoundException("The provided SFile instance is not part of the Data"); - return ReadFromBuffer(file.Name, data.GetFileBuffer(file), type, episode); + return ReadFromBuffer(file.Name, data.GetFileBuffer(file), type, episode, encoding); } } diff --git a/src/Parsec/Shaiya/Core/ReflectionHelper.cs b/src/Parsec/Shaiya/Core/ReflectionHelper.cs index 83a5cd92..47881891 100644 --- a/src/Parsec/Shaiya/Core/ReflectionHelper.cs +++ b/src/Parsec/Shaiya/Core/ReflectionHelper.cs @@ -15,9 +15,11 @@ public static object ReadProperty( Type parentType, object parentInstance, PropertyInfo propertyInfo, - Episode episode = Episode.Unknown + Episode episode = Episode.Unknown, + Encoding encoding = null ) { + encoding ??= Encoding.ASCII; var type = propertyInfo.PropertyType; var attributes = propertyInfo.GetCustomAttributes().ToList(); @@ -79,7 +81,7 @@ public static object ReadProperty( if (!property.IsDefined(typeof(ShaiyaPropertyAttribute))) continue; - object propertyValue = ReadProperty(binaryReader, genericArgumentType, item, property, episode); + object propertyValue = ReadProperty(binaryReader, genericArgumentType, item, property, episode, encoding); property.SetValue(item, propertyValue); } } @@ -153,7 +155,7 @@ public static object ReadProperty( if (!property.IsDefined(typeof(ShaiyaPropertyAttribute))) continue; - object propertyValue = ReadProperty(binaryReader, genericItemType, item, property, episode); + object propertyValue = ReadProperty(binaryReader, genericItemType, item, property, episode, encoding); property.SetValue(item, propertyValue); } } @@ -164,16 +166,14 @@ public static object ReadProperty( return list; case LengthPrefixedStringAttribute lengthPrefixedStringAttribute: - string lengthPrefixedStr = binaryReader.ReadString(lengthPrefixedStringAttribute.Encoding, - !lengthPrefixedStringAttribute.IncludeStringTerminator); - + string lengthPrefixedStr = binaryReader.ReadString(encoding, !lengthPrefixedStringAttribute.IncludeStringTerminator); return lengthPrefixedStr; case FixedLengthStringAttribute fixedLengthStringAttribute: if (fixedLengthStringAttribute.IsString256) return new String256(binaryReader).Value; - string fixedLengthStr = binaryReader.ReadString(fixedLengthStringAttribute.Encoding, 
fixedLengthStringAttribute.Length, + string fixedLengthStr = binaryReader.ReadString(encoding, fixedLengthStringAttribute.Length, !fixedLengthStringAttribute.IncludeStringTerminator); return fixedLengthStr; @@ -342,9 +342,9 @@ public static IEnumerable GetPropertyBytes(Type parentType, object obj, Pr case FixedLengthStringAttribute fixedLengthStringAttribute: if (fixedLengthStringAttribute.IsString256) - return ((string)propertyValue).PadRight(256, '\0').GetBytes(fixedLengthStringAttribute.Encoding); + return ((string)propertyValue).PadRight(256, '\0').GetBytes(encoding); return ((string)propertyValue + fixedLengthStringAttribute.Suffix) - .PadRight(fixedLengthStringAttribute.Length, '\0').GetBytes(fixedLengthStringAttribute.Encoding); + .PadRight(fixedLengthStringAttribute.Length, '\0').GetBytes(encoding); } } diff --git a/src/Parsec/Shaiya/EFT/Effect.cs b/src/Parsec/Shaiya/EFT/Effect.cs index 8854a5c8..168b9e91 100644 --- a/src/Parsec/Shaiya/EFT/Effect.cs +++ b/src/Parsec/Shaiya/EFT/Effect.cs @@ -1,4 +1,5 @@ -using Newtonsoft.Json; +using System.Text; +using Newtonsoft.Json; using Parsec.Extensions; using Parsec.Readers; using Parsec.Shaiya.Common; @@ -84,7 +85,7 @@ public Effect() public Effect(SBinaryReader binaryReader, EFTFormat format, int index) { Index = index; - Name = binaryReader.ReadString(); + Name = binaryReader.ReadString(Encoding.ASCII); Unknown1 = binaryReader.Read(); Unknown2 = binaryReader.Read(); @@ -175,7 +176,7 @@ public IEnumerable GetBytes(params object[] options) format = (EFTFormat)options[0]; var buffer = new List(); - buffer.AddRange(Name.GetLengthPrefixedBytes()); + buffer.AddRange(Name.GetLengthPrefixedBytes(Encoding.ASCII)); buffer.AddRange(Unknown1.GetBytes()); buffer.AddRange(Unknown2.GetBytes()); diff --git a/src/Parsec/Shaiya/EFT/EffectSequence.cs b/src/Parsec/Shaiya/EFT/EffectSequence.cs index e6002120..e02a4345 100644 --- a/src/Parsec/Shaiya/EFT/EffectSequence.cs +++ b/src/Parsec/Shaiya/EFT/EffectSequence.cs @@ -1,4 +1,5 @@ -using Newtonsoft.Json; +using System.Text; +using Newtonsoft.Json; using Parsec.Extensions; using Parsec.Readers; using Parsec.Shaiya.Core; @@ -14,7 +15,7 @@ public EffectSequence() public EffectSequence(SBinaryReader binaryReader) { - Name = binaryReader.ReadString(); + Name = binaryReader.ReadString(Encoding.ASCII); int recordCount = binaryReader.Read(); for (int i = 0; i < recordCount; i++) @@ -30,7 +31,7 @@ public EffectSequence(SBinaryReader binaryReader) public IEnumerable GetBytes(params object[] options) { var buffer = new List(); - buffer.AddRange(Name.GetLengthPrefixedBytes()); + buffer.AddRange(Name.GetLengthPrefixedBytes(Encoding.ASCII)); buffer.AddRange(Records.GetBytes()); return buffer; } diff --git a/src/Parsec/Shaiya/Item/IItemDefinition.cs b/src/Parsec/Shaiya/Item/IItemDefinition.cs index 9d10163b..bddef533 100644 --- a/src/Parsec/Shaiya/Item/IItemDefinition.cs +++ b/src/Parsec/Shaiya/Item/IItemDefinition.cs @@ -3,5 +3,6 @@ public interface IItemDefinition { public byte Type { get; set; } + public byte TypeId { get; set; } } diff --git a/src/Parsec/Shaiya/Item/Item.cs b/src/Parsec/Shaiya/Item/Item.cs index 998efac4..c0124b5a 100644 --- a/src/Parsec/Shaiya/Item/Item.cs +++ b/src/Parsec/Shaiya/Item/Item.cs @@ -1,4 +1,5 @@ using System.Globalization; +using System.Text; using CsvHelper; using Newtonsoft.Json; using Parsec.Common; @@ -11,58 +12,28 @@ public sealed class Item : SData.SData, ICsv [JsonIgnore] public Dictionary<(byte type, byte typeId), IItemDefinition> ItemIndex = new(); - public int MaxType { get; set; 
} - public List Types { get; } = new(); + public int MaxItemType { get; set; } - /// - public void WriteCsv(string outputPath) - { - switch (Episode) - { - case Episode.Unknown: - case Episode.EP4: - case Episode.EP5: - default: - { - var items = ItemIndex.Values.ToList().Cast().ToList(); - using var writer = new StreamWriter(outputPath); - using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); - csvWriter.WriteRecords(items); - break; - } - case Episode.EP6: - case Episode.EP7: - { - var items = ItemIndex.Values.ToList().Cast().ToList(); - using var writer = new StreamWriter(outputPath); - using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); - csvWriter.WriteRecords(items); - break; - } - case Episode.EP8: - throw new Exception("Episode 8 must use the DBItemData class."); - } - } + public List ItemTypes { get; } = new(); public override void Read() { - MaxType = _binaryReader.Read(); - for (int i = 0; i < MaxType; i++) + MaxItemType = _binaryReader.Read(); + for (int i = 0; i < MaxItemType; i++) { - var type = new Type(_binaryReader, i + 1, Episode, ItemIndex); - Types.Add(type); + var itemType = new ItemType(_binaryReader, i + 1, Episode, ItemIndex, Encoding); + ItemTypes.Add(itemType); } } public override IEnumerable GetBytes(Episode episode = Episode.Unknown) { var buffer = new List(); + buffer.AddRange(MaxItemType.GetBytes()); - buffer.AddRange(MaxType.GetBytes()); - - for (int i = 1; i <= MaxType; i++) + for (int i = 1; i <= MaxItemType; i++) { - var type = Types.SingleOrDefault(t => t.Id == i); + var type = ItemTypes.SingleOrDefault(t => t.Id == i); // When type isn't part of the item, its MaxTypeId = 0 must be written to the file anyways if (type == null) @@ -71,7 +42,7 @@ public override IEnumerable GetBytes(Episode episode = Episode.Unknown) continue; } - buffer.AddRange(type.GetBytes(episode)); + buffer.AddRange(type.GetBytes(episode, Encoding)); } return buffer; @@ -82,11 +53,14 @@ public override IEnumerable GetBytes(Episode episode = Episode.Unknown) /// /// csv file path /// The Item.SData format + /// Item.SData encoding /// instance - public static Item ReadFromCsv(string csvPath, Episode episode) + public static Item ReadFromCsv(string csvPath, Episode episode, Encoding encoding = null) { + encoding ??= Encoding.ASCII; + // Create Item.SData instance - var item = new Item { Episode = episode }; + var item = new Item { Episode = episode, Encoding = encoding }; var itemDefinitions = new List(); // Read all item definitions from csv file @@ -98,7 +72,7 @@ public static Item ReadFromCsv(string csvPath, Episode episode) default: { // Read item definitions from csv - using var reader = new StreamReader(csvPath); + using var reader = new StreamReader(csvPath, encoding); using var csvReader = new CsvReader(reader, CultureInfo.InvariantCulture); var records = csvReader.GetRecords().ToList(); @@ -110,7 +84,7 @@ public static Item ReadFromCsv(string csvPath, Episode episode) case Episode.EP7: { // Read item definitions from csv - using var reader = new StreamReader(csvPath); + using var reader = new StreamReader(csvPath, encoding); using var csvReader = new CsvReader(reader, CultureInfo.InvariantCulture); var records = csvReader.GetRecords().ToList(); @@ -123,24 +97,56 @@ public static Item ReadFromCsv(string csvPath, Episode episode) } // Get max type from items - item.MaxType = itemDefinitions.Max(x => x.Type); + item.MaxItemType = itemDefinitions.Max(x => x.Type); // Add all items to item index var itemIndex = 
itemDefinitions.ToDictionary(itemDef => (itemDef.Type, itemDef.TypeId)); item.ItemIndex = itemIndex; // Create item types - for (int i = 1; i <= item.MaxType; i++) + for (int i = 1; i <= item.MaxItemType; i++) { // Get items for this type var items = item.ItemIndex.Values.Where(x => x.Type == i).ToList(); int maxTypeId = items.Count == 0 ? 0 : items.Max(x => x.TypeId); - var type = new Type(i, maxTypeId, items); - item.Types.Add(type); + var type = new ItemType(i, maxTypeId, items); + item.ItemTypes.Add(type); } return item; } + + /// + public void WriteCsv(string outputPath, Encoding encoding = null) + { + encoding ??= Encoding.ASCII; + + switch (Episode) + { + case Episode.Unknown: + case Episode.EP4: + case Episode.EP5: + default: + { + var items = ItemIndex.Values.ToList().Cast().ToList(); + using var writer = new StreamWriter(outputPath, false, encoding); + using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); + csvWriter.WriteRecords(items); + break; + } + case Episode.EP6: + case Episode.EP7: + { + var items = ItemIndex.Values.ToList().Cast().ToList(); + using var writer = new StreamWriter(outputPath, false, encoding); + using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); + csvWriter.WriteRecords(items); + break; + } + case Episode.EP8: + throw new Exception("Episode 8 must use the DBItemData class."); + } + } } diff --git a/src/Parsec/Shaiya/Item/ItemDefinitionEp5.cs b/src/Parsec/Shaiya/Item/ItemDefinitionEp5.cs index 59c0eb57..d77367e0 100644 --- a/src/Parsec/Shaiya/Item/ItemDefinitionEp5.cs +++ b/src/Parsec/Shaiya/Item/ItemDefinitionEp5.cs @@ -1,4 +1,5 @@ -using Newtonsoft.Json; +using System.Text; +using Newtonsoft.Json; using Parsec.Extensions; using Parsec.Readers; using Parsec.Shaiya.Core; @@ -12,10 +13,10 @@ public ItemDefinitionEp5() { } - public ItemDefinitionEp5(SBinaryReader binaryReader) + public ItemDefinitionEp5(SBinaryReader binaryReader, Encoding encoding) { - Name = binaryReader.ReadString(); - Description = binaryReader.ReadString(); + Name = binaryReader.ReadString(encoding); + Description = binaryReader.ReadString(encoding); Type = binaryReader.Read(); TypeId = binaryReader.Read(); Model = binaryReader.Read(); @@ -141,9 +142,16 @@ public ItemDefinitionEp5(SBinaryReader binaryReader) public IEnumerable GetBytes(params object[] options) { + var encoding = Encoding.ASCII; + + if (options.Length > 0 && options[0] is Encoding stringEncoding) + { + encoding = stringEncoding; + } + var buffer = new List(); - buffer.AddRange(Name.GetLengthPrefixedBytes()); - buffer.AddRange(Description.GetLengthPrefixedBytes()); + buffer.AddRange(Name.GetLengthPrefixedBytes(encoding)); + buffer.AddRange(Description.GetLengthPrefixedBytes(encoding)); buffer.Add(Type); buffer.Add(TypeId); buffer.Add(Model); diff --git a/src/Parsec/Shaiya/Item/ItemDefinitionEp6.cs b/src/Parsec/Shaiya/Item/ItemDefinitionEp6.cs index eaa09b8e..75f468b8 100644 --- a/src/Parsec/Shaiya/Item/ItemDefinitionEp6.cs +++ b/src/Parsec/Shaiya/Item/ItemDefinitionEp6.cs @@ -1,4 +1,5 @@ -using Newtonsoft.Json; +using System.Text; +using Newtonsoft.Json; using Parsec.Extensions; using Parsec.Readers; using Parsec.Shaiya.Core; @@ -12,10 +13,10 @@ public ItemDefinitionEp6() { } - public ItemDefinitionEp6(SBinaryReader binaryReader) + public ItemDefinitionEp6(SBinaryReader binaryReader, Encoding encoding) { - Name = binaryReader.ReadString(); - Description = binaryReader.ReadString(); + Name = binaryReader.ReadString(encoding); + Description = binaryReader.ReadString(encoding); Type 
= binaryReader.Read(); TypeId = binaryReader.Read(); Model = binaryReader.Read(); @@ -189,9 +190,16 @@ public ItemDefinitionEp6(SBinaryReader binaryReader) public IEnumerable GetBytes(params object[] options) { + var encoding = Encoding.ASCII; + + if (options.Length > 0 && options[0] is Encoding stringEncoding) + { + encoding = stringEncoding; + } + var buffer = new List(); - buffer.AddRange(Name.GetLengthPrefixedBytes()); - buffer.AddRange(Description.GetLengthPrefixedBytes()); + buffer.AddRange(Name.GetLengthPrefixedBytes(encoding)); + buffer.AddRange(Description.GetLengthPrefixedBytes(encoding)); buffer.Add(Type); buffer.Add(TypeId); buffer.Add(Model); diff --git a/src/Parsec/Shaiya/Item/Type.cs b/src/Parsec/Shaiya/Item/ItemType.cs similarity index 68% rename from src/Parsec/Shaiya/Item/Type.cs rename to src/Parsec/Shaiya/Item/ItemType.cs index 088d240b..fc6f30f6 100644 --- a/src/Parsec/Shaiya/Item/Type.cs +++ b/src/Parsec/Shaiya/Item/ItemType.cs @@ -1,4 +1,5 @@ -using Newtonsoft.Json; +using System.Text; +using Newtonsoft.Json; using Parsec.Common; using Parsec.Extensions; using Parsec.Readers; @@ -6,21 +7,28 @@ namespace Parsec.Shaiya.Item; -public sealed class Type : IBinary +public sealed class ItemType : IBinary { + public int Id { get; set; } + + public int MaxTypeId { get; set; } + + public List ItemDefinitions { get; } = new(); + [JsonConstructor] - public Type() + public ItemType() { } - public Type(int id, int maxTypeId, IEnumerable itemDefinitions) + public ItemType(int id, int maxTypeId, IEnumerable itemDefinitions) { Id = id; MaxTypeId = maxTypeId; ItemDefinitions = itemDefinitions.ToList(); } - public Type(SBinaryReader binaryReader, int id, Episode episode, IDictionary<(byte type, byte typeId), IItemDefinition> itemIndex) + public ItemType(SBinaryReader binaryReader, int id, Episode episode, IDictionary<(byte type, byte typeId), IItemDefinition> itemIndex, + Encoding encoding) { Id = id; MaxTypeId = binaryReader.Read(); @@ -31,12 +39,12 @@ public Type(SBinaryReader binaryReader, int id, Episode episode, IDictionary<(by { case Episode.EP5: default: - var itemEp5 = new ItemDefinitionEp5(binaryReader); + var itemEp5 = new ItemDefinitionEp5(binaryReader, encoding); ItemDefinitions.Add(itemEp5); itemIndex.Add((itemEp5.Type, itemEp5.TypeId), itemEp5); break; case Episode.EP6: - var itemEp6 = new ItemDefinitionEp6(binaryReader); + var itemEp6 = new ItemDefinitionEp6(binaryReader, encoding); ItemDefinitions.Add(itemEp6); itemIndex.Add((itemEp6.Type, itemEp6.TypeId), itemEp6); break; @@ -44,32 +52,35 @@ public Type(SBinaryReader binaryReader, int id, Episode episode, IDictionary<(by } } - public int Id { get; set; } - public int MaxTypeId { get; set; } - public List ItemDefinitions { get; } = new(); - public IEnumerable GetBytes(params object[] options) { - var format = Episode.EP5; + var episode = Episode.EP5; + var encoding = Encoding.ASCII; - if (options.Length > 0) - format = (Episode)options[0]; + if (options.Length > 0 && options[0] is Episode episodeOption) + { + episode = episodeOption; + } - var buffer = new List(); + if (options.Length > 1 && options[1] is Encoding encodingOption) + { + encoding = encodingOption; + } + var buffer = new List(); buffer.AddRange(MaxTypeId.GetBytes()); foreach (var itemDefinition in ItemDefinitions) { // Add item definitions based on format - switch (format) + switch (episode) { case Episode.EP5: default: - buffer.AddRange(((ItemDefinitionEp5)itemDefinition).GetBytes()); + 
buffer.AddRange(((ItemDefinitionEp5)itemDefinition).GetBytes(encoding)); break; case Episode.EP6: - buffer.AddRange(((ItemDefinitionEp6)itemDefinition).GetBytes()); + buffer.AddRange(((ItemDefinitionEp6)itemDefinition).GetBytes(encoding)); break; } } diff --git a/src/Parsec/Shaiya/Monster/Monster.cs b/src/Parsec/Shaiya/Monster/Monster.cs index a22745af..a271e6b3 100644 --- a/src/Parsec/Shaiya/Monster/Monster.cs +++ b/src/Parsec/Shaiya/Monster/Monster.cs @@ -1,4 +1,5 @@ using System.Globalization; +using System.Text; using CsvHelper; using Newtonsoft.Json; using Parsec.Common; @@ -22,19 +23,12 @@ public Monster(List records) public override string Extension => "SData"; - public void WriteCsv(string outputPath) - { - using var writer = new StreamWriter(outputPath); - using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); - csvWriter.WriteRecords(Records); - } - public override void Read() { int recordCount = _binaryReader.Read(); for (int i = 0; i < recordCount; i++) { - var record = new MonsterRecord(_binaryReader); + var record = new MonsterRecord(_binaryReader, Encoding); Records.Add(record); } } @@ -45,16 +39,27 @@ public override void Read() /// Reads the Monster.SData format from a csv file /// /// csv file path + /// File encoding /// instance - public static Monster ReadFromCsv(string csvPath) + public static Monster ReadFromCsv(string csvPath, Encoding encoding = null) { + encoding ??= Encoding.ASCII; + // Read all monster definitions from csv file - using var reader = new StreamReader(csvPath); + using var reader = new StreamReader(csvPath, encoding); using var csvReader = new CsvReader(reader, CultureInfo.InvariantCulture); var records = csvReader.GetRecords().ToList(); // Create monster instance - var monster = new Monster(records); + var monster = new Monster(records) { Encoding = encoding }; return monster; } + + public void WriteCsv(string outputPath, Encoding encoding = null) + { + encoding ??= Encoding.ASCII; + using var writer = new StreamWriter(outputPath, false, encoding); + using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); + csvWriter.WriteRecords(Records); + } } diff --git a/src/Parsec/Shaiya/Monster/MonsterRecord.cs b/src/Parsec/Shaiya/Monster/MonsterRecord.cs index 59c4aff5..2a3a70af 100644 --- a/src/Parsec/Shaiya/Monster/MonsterRecord.cs +++ b/src/Parsec/Shaiya/Monster/MonsterRecord.cs @@ -1,4 +1,5 @@ -using Newtonsoft.Json; +using System.Text; +using Newtonsoft.Json; using Parsec.Extensions; using Parsec.Readers; using Parsec.Shaiya.Core; @@ -12,9 +13,9 @@ public MonsterRecord() { } - public MonsterRecord(SBinaryReader binaryReader) + public MonsterRecord(SBinaryReader binaryReader, Encoding encoding) { - MobName = binaryReader.ReadString(); + MobName = binaryReader.ReadString(encoding); ModelId = binaryReader.Read(); Level = binaryReader.Read(); AI = binaryReader.Read(); diff --git a/src/Parsec/Shaiya/SData/BinarySData.cs b/src/Parsec/Shaiya/SData/BinarySData.cs index 826b04a1..e78a8e67 100644 --- a/src/Parsec/Shaiya/SData/BinarySData.cs +++ b/src/Parsec/Shaiya/SData/BinarySData.cs @@ -1,4 +1,5 @@ using System.Globalization; +using System.Text; using CsvHelper; using Parsec.Attributes; using Parsec.Common; @@ -71,9 +72,10 @@ public override IEnumerable GetBytes(Episode episode = Episode.Unknown) return buffer; } - public static T ReadFromCsv(string csvPath) where T : BinarySData, new() + public static T ReadFromCsv(string csvPath, Encoding encoding = null) where T : BinarySData, new() { - using var reader = new 
StreamReader(csvPath); + encoding ??= Encoding.ASCII; + using var reader = new StreamReader(csvPath, encoding); using var csvReader = new CsvReader(reader, CultureInfo.InvariantCulture); // Read headers and records @@ -81,13 +83,14 @@ public override IEnumerable GetBytes(Episode episode = Episode.Unknown) var fields = csvReader.HeaderRecord?.Select(c => new BinarySDataField(c.ToLower())).ToList(); // Create the BinarySData instance with an empty header. The header is skipped entirely by the game so this isn't an issue. - var binarySData = new T { Header = new byte[128], Fields = fields, Records = records }; + var binarySData = new T { Header = new byte[128], Fields = fields, Records = records, Encoding = encoding }; return binarySData; } - public void WriteCsv(string outputPath) + public void WriteCsv(string outputPath, Encoding encoding = null) { - using var writer = new StreamWriter(outputPath); + encoding ??= Encoding.ASCII; + using var writer = new StreamWriter(outputPath, false, encoding); using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); csvWriter.WriteRecords(Records); } diff --git a/src/Parsec/Shaiya/Skill/Skill.cs b/src/Parsec/Shaiya/Skill/Skill.cs index bb18aad9..a903620c 100644 --- a/src/Parsec/Shaiya/Skill/Skill.cs +++ b/src/Parsec/Shaiya/Skill/Skill.cs @@ -1,4 +1,5 @@ using System.Globalization; +using System.Text; using CsvHelper; using Parsec.Common; using Parsec.Extensions; @@ -54,28 +55,32 @@ private int GetRecordCountPerSkill(Episode episode) }; } - public void WriteCsv(string outputPath) - { - using var writer = new StreamWriter(outputPath); - using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); - csvWriter.WriteRecords(Records); - } - /// /// Reads the Skill.SData format from a csv file /// /// csv file path /// File episode + /// File encoding /// instance - public static Skill ReadFromCsv(string csvPath, Episode episode) + public static Skill ReadFromCsv(string csvPath, Episode episode, Encoding encoding = null) { + encoding ??= Encoding.ASCII; + // Read all skill definitions from csv file - using var reader = new StreamReader(csvPath); + using var reader = new StreamReader(csvPath, encoding); using var csvReader = new CsvReader(reader, CultureInfo.InvariantCulture); var records = csvReader.GetRecords().ToList(); // Create skill instance - var skill = new Skill { Episode = episode, Records = records }; + var skill = new Skill { Episode = episode, Records = records, Encoding = encoding }; return skill; } + + public void WriteCsv(string outputPath, Encoding encoding = null) + { + encoding ??= Encoding.ASCII; + using var writer = new StreamWriter(outputPath, false, encoding); + using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture); + csvWriter.WriteRecords(Records); + } } diff --git a/tests/Parsec.Tests/Parsec.Tests.csproj b/tests/Parsec.Tests/Parsec.Tests.csproj index ddbf6d65..464ace53 100644 --- a/tests/Parsec.Tests/Parsec.Tests.csproj +++ b/tests/Parsec.Tests/Parsec.Tests.csproj @@ -9,6 +9,7 @@ + runtime; build; native; contentfiles; analyzers; buildtransitive @@ -706,5 +707,8 @@ PreserveNewest + + PreserveNewest + diff --git a/tests/Parsec.Tests/Shaiya/Monster/MonsterTests.cs b/tests/Parsec.Tests/Shaiya/Monster/MonsterTests.cs index 62582d39..1c5afe69 100644 --- a/tests/Parsec.Tests/Shaiya/Monster/MonsterTests.cs +++ b/tests/Parsec.Tests/Shaiya/Monster/MonsterTests.cs @@ -13,4 +13,18 @@ public void MonsterCsvTest() var monsterFromCsv = Parsec.Shaiya.Monster.Monster.ReadFromCsv(csvPath); 
Assert.Equal(monster.GetBytes(), monsterFromCsv.GetBytes()); } + + [Fact] + public void MonsterCsv_EncodingTest() + { + const string filePath = "Shaiya/Monster/Monster_1252.SData"; + const string csvPath = "Shaiya/Monster/Monster_1252.SData.csv"; + + var encoding = TestEncodings.Encoding1252; + + var monster = Reader.ReadFromFile(filePath, encoding: encoding); + monster.WriteCsv(csvPath, encoding); + var monsterFromCsv = Parsec.Shaiya.Monster.Monster.ReadFromCsv(csvPath, encoding); + Assert.Equal(monster.GetBytes(), monsterFromCsv.GetBytes()); + } }
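A minimal round-trip sketch of the encoding-aware API these changes introduce, mirroring the README examples and the `MonsterCsv_EncodingTest` above. The `using` directives and file paths are assumptions for illustration, and the generic type parameter on `ReadFromFile` is written out explicitly.

```cs
using System.Text;
using Parsec.Readers;         // assumed namespace of the Reader class
using Parsec.Shaiya.Monster;  // namespace used by the tests above

// Windows-1252, as in the README examples (CodePagesEncodingProvider ships in the
// System.Text.Encoding.CodePages package on modern .NET)
var windows1252 = CodePagesEncodingProvider.Instance.GetEncoding(1252);

// Read the SData with an explicit encoding; the instance keeps it for later writes
var monster = Reader.ReadFromFile<Monster>("Monster.SData", encoding: windows1252);

// Export to CSV and rebuild the file from that CSV using the same encoding
monster.WriteCsv("Monster.SData.csv", windows1252);
var monsterFromCsv = Monster.ReadFromCsv("Monster.SData.csv", windows1252);

// Both instances should serialize to identical bytes, as the test above asserts
// Assert.Equal(monster.GetBytes(), monsterFromCsv.GetBytes());
```

Because the encoding is stored on the instance when the file is read, a later `monster.Write("Monster_modified.SData")` does not need the encoding passed again, per the README note above.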