From 5a762437c10a8a4c6f9f9dde58566afe2ed63ae4 Mon Sep 17 00:00:00 2001
From: Lucas Pedroza
Date: Fri, 15 Dec 2023 17:10:02 +0100
Subject: [PATCH 1/2] Support many more deserialization types

---
 .../Serializer.Deserialize.Tests.cs           | 129 +++++++++-
 .../Serializer.Serialize.Tests.cs             |   1 -
 .../Serialization/Utf8FaunaReader.Tests.cs    |   4 +-
 .../Serialization/Utf8FaunaWriter.Tests.cs    |   1 +
 Fauna/Serialization/Serializer.Deserialize.cs | 228 +++++++++++++++---
 Fauna/Serialization/TokenType.cs              |   8 +-
 Fauna/Serialization/Utf8FaunaReader.cs        |  12 +-
 Fauna/Types/Document.cs                       |  79 ++++++
 Fauna/Types/Module.cs                         |   2 +-
 Fauna/Types/Ref.cs                            |   7 +
 10 files changed, 428 insertions(+), 43 deletions(-)
 create mode 100644 Fauna/Types/Document.cs
 create mode 100644 Fauna/Types/Ref.cs

diff --git a/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs b/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs
index 9f2bac35..b3868c83 100644
--- a/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs
+++ b/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs
@@ -1,5 +1,4 @@
 using Fauna.Serialization;
-using Fauna.Serialization.Attributes;
 using Fauna.Types;
 using NUnit.Framework;
 
@@ -8,7 +7,6 @@ namespace Fauna.Test.Serialization;
 [TestFixture]
 public partial class SerializerTests
 {
-
     [Test]
     public void DeserializeValues()
     {
@@ -32,6 +30,79 @@ public void DeserializeValues()
         }
     }
 
+    [Test]
+    public void DeserializeStringGeneric()
+    {
+        var result = Serializer.Deserialize<string>("\"hello\"");
+        Assert.AreEqual("hello", result);
+    }
+
+    [Test]
+    public void DeserializeNullableGeneric()
+    {
+        var result = Serializer.Deserialize<string?>("null");
+        Assert.IsNull(result);
+    }
+
+    [Test]
+    public void DeserializeIntGeneric()
+    {
+        var result = Serializer.Deserialize<int>(@"{""@int"":""42""}");
+        Assert.AreEqual(42, result);
+    }
+
+
+    [Test]
+    public void DeserializeDateGeneric()
+    {
+        var result = Serializer.Deserialize<DateTime>(@"{""@date"": ""2023-12-03""}");
+        Assert.AreEqual(new DateTime(2023, 12, 3), result);
+    }
+
+    [Test]
+    public void DeserializeTimeGeneric()
+    {
+        var result = Serializer.Deserialize<DateTime>(@"{""@time"": ""2023-12-03T05:52:10.000001-09:00""}");
+        var expected = new DateTime(2023, 12, 3, 14, 52, 10, 0, DateTimeKind.Utc).AddTicks(10).ToLocalTime();
+        Assert.AreEqual(expected, result);
+    }
+
+    [Test]
+    public void DeserializeDocument()
+    {
+        const string given = @"
+        {
+            ""@doc"":{
+                ""id"":""123"",
+                ""coll"":{""@mod"":""MyColl""},
+                ""ts"":{""@time"":""2023-12-15T01:01:01.0010010Z""},
+                ""user_field"":""user_value""
+            }
+        }";
+
+        var actual = Serializer.Deserialize<Document>(given);
+        Assert.AreEqual("123", actual.Id);
+        Assert.AreEqual(new Module("MyColl"), actual.Collection);
+        Assert.AreEqual(DateTime.Parse("2023-12-15T01:01:01.0010010Z"), actual.Ts);
+        Assert.AreEqual("user_value", actual["user_field"]);
+    }
+
+    [Test]
+    public void DeserializeRef()
+    {
+        const string given = @"
+        {
+            ""@ref"":{
+                ""id"":""123"",
+                ""coll"":{""@mod"":""MyColl""}
+            }
+        }";
+
+        var actual = Serializer.Deserialize<Ref>(given);
+        Assert.AreEqual("123", actual.Id);
+        Assert.AreEqual(new Module("MyColl"), actual.Collection);
+    }
+
     [Test]
     public void DeserializeObject()
     {
@@ -105,6 +176,23 @@ public void DeserializeEscapedObject()
         Assert.AreEqual(expected, result);
     }
 
+    [Test]
+    public void DeserializeIntoGenericDictionary()
+    {
+        const string given = @"{
+""k1"": { ""@int"": ""1"" },
+""k2"": { ""@int"": ""2"" },
+""k3"": { ""@int"": ""3"" }
+}";
+        var expected = new Dictionary<string, int>()
+        {
+            {"k1", 1},
+            {"k2", 2},
+            {"k3", 3}
+        };
+        var actual = Serializer.Deserialize<Dictionary<string, int>>(given);
+        Assert.AreEqual(expected, actual);
+    }
 
     [Test]
     public void DeserializeIntoPoco()
@@ -142,4 +230,41 @@ public void DeserializeIntoPocoWithAttributes()
         Assert.AreEqual(612, p.Age);
         Assert.IsNull(p.Ignored);
     }
+
+    [Test]
+    public void DeserializeIntoList()
+    {
+        const string given = @"[""item1"",""item2""]";
+        var expected = new List<object?> { "item1", "item2" };
+        var p = Serializer.Deserialize(given);
+        Assert.AreEqual(expected, p);
+    }
+
+    [Test]
+    public void DeserializeIntoGenericListWithPrimitive()
+    {
+        const string given = @"[""item1"",""item2""]";
+        var expected = new List<string> { "item1", "item2" };
+        var p = Serializer.Deserialize<List<string>>(given);
+        Assert.AreEqual(expected, p);
+    }
+
+    [Test]
+    public void DeserializeIntoGenericListWithPocoWithAttributes()
+    {
+        const string given = @"[
+{""first_name"":""Cleve"",""last_name"":""Stuart"",""age"":{""@int"":""100""}},
+{""first_name"":""Darren"",""last_name"":""Cunningham"",""age"":{""@int"":""101""}}
+]";
+        var peeps = Serializer.Deserialize<List<PersonWithAttributes>>(given);
+        var cleve = peeps[0];
+        var darren = peeps[1];
+        Assert.AreEqual("Cleve", cleve.FirstName);
+        Assert.AreEqual("Stuart", cleve.LastName);
+        Assert.AreEqual(100, cleve.Age);
+
+        Assert.AreEqual("Darren", darren.FirstName);
+        Assert.AreEqual("Cunningham", darren.LastName);
+        Assert.AreEqual(101, darren.Age);
+    }
 }
\ No newline at end of file
diff --git a/Fauna.Test/Serialization/Serializer.Serialize.Tests.cs b/Fauna.Test/Serialization/Serializer.Serialize.Tests.cs
index 6476b141..38ca160f 100644
--- a/Fauna.Test/Serialization/Serializer.Serialize.Tests.cs
+++ b/Fauna.Test/Serialization/Serializer.Serialize.Tests.cs
@@ -1,6 +1,5 @@
 using System.Text.RegularExpressions;
 using Fauna.Serialization;
-using Fauna.Serialization.Attributes;
 using Fauna.Types;
 using NUnit.Framework;
 
diff --git a/Fauna.Test/Serialization/Utf8FaunaReader.Tests.cs b/Fauna.Test/Serialization/Utf8FaunaReader.Tests.cs
index 54c75d4e..0556c65f 100644
--- a/Fauna.Test/Serialization/Utf8FaunaReader.Tests.cs
+++ b/Fauna.Test/Serialization/Utf8FaunaReader.Tests.cs
@@ -312,14 +312,14 @@ public void ReadSet()
         var reader = new Utf8FaunaReader(s);
         var expectedTokens = new List<Tuple<TokenType, object?>>
         {
-            new(TokenType.StartSet, null),
+            new(TokenType.StartPage, null),
             new(TokenType.FieldName, "data"),
             new(TokenType.StartArray, null),
             new(TokenType.Int, 99),
             new(TokenType.EndArray, null),
             new(TokenType.FieldName, "after"),
             new(TokenType.String, "afterme"),
-            new(TokenType.EndSet, null)
+            new(TokenType.EndPage, null)
         };
 
         AssertReader(reader, expectedTokens);
diff --git a/Fauna.Test/Serialization/Utf8FaunaWriter.Tests.cs b/Fauna.Test/Serialization/Utf8FaunaWriter.Tests.cs
index e3e4ba35..a3c8c7b6 100644
--- a/Fauna.Test/Serialization/Utf8FaunaWriter.Tests.cs
+++ b/Fauna.Test/Serialization/Utf8FaunaWriter.Tests.cs
@@ -22,6 +22,7 @@ public void Init()
     public void Cleanup()
     {
         _writer.Dispose();
+        _stream.Dispose();
     }
 
     private void AssertWriter(string expected)
diff --git a/Fauna/Serialization/Serializer.Deserialize.cs b/Fauna/Serialization/Serializer.Deserialize.cs
index fd2fd173..e49c87ef 100644
--- a/Fauna/Serialization/Serializer.Deserialize.cs
+++ b/Fauna/Serialization/Serializer.Deserialize.cs
@@ -1,6 +1,4 @@
-using System.Collections;
-using System.Reflection;
-using Fauna.Serialization.Attributes;
+using Fauna.Types;
 using Type = System.Type;
 
 namespace Fauna.Serialization;
@@ -9,18 +7,19 @@ public static partial class Serializer
 {
     public static object? Deserialize(string str)
     {
-        return Deserialize(str, typeof(object));
+        return Deserialize(str, null);
     }
 
-    public static T? Deserialize<T>(string str)
+    public static T Deserialize<T>(string str)
     {
-        return (T?)Deserialize(str, typeof(T));
+        return (T)Deserialize(str, typeof(T));
     }
 
-    public static object? Deserialize(string str, Type type)
+    public static object? Deserialize(string str, Type? type)
     {
         var reader = new Utf8FaunaReader(str);
         var context = new SerializationContext();
+        reader.Read();
         var obj = DeserializeValueInternal(ref reader, context, type);
 
         if (reader.Read())
@@ -31,17 +30,20 @@ public static partial class Serializer
         return obj;
     }
 
-    private static object? DeserializeValueInternal(ref Utf8FaunaReader reader, SerializationContext context, Type? targetType = null)
+    private static T DeserializeValueInternal<T>(ref Utf8FaunaReader reader, SerializationContext context)
     {
-        reader.Read();
+        return (T)DeserializeValueInternal(ref reader, context, typeof(T));
+    }
 
+    private static object? DeserializeValueInternal(ref Utf8FaunaReader reader, SerializationContext context, Type? targetType = null)
+    {
         var value = reader.CurrentTokenType switch
         {
             TokenType.StartObject => DeserializeObjectInternal(ref reader, context, targetType),
-            TokenType.StartArray => throw new NotImplementedException(),
-            TokenType.StartSet => throw new NotImplementedException(),
-            TokenType.StartRef => throw new NotImplementedException(),
-            TokenType.StartDocument => throw new NotImplementedException(),
+            TokenType.StartArray => DeserializeArrayInternal(ref reader, context, targetType),
+            TokenType.StartPage => throw new NotImplementedException(),
+            TokenType.StartRef => DeserializeRefInternal(ref reader, context, targetType),
+            TokenType.StartDocument => DeserializeDocumentInternal(ref reader, context, targetType),
             TokenType.String => reader.GetValue(),
             TokenType.Int => reader.GetValue(),
             TokenType.Long => reader.GetValue(),
@@ -59,21 +61,146 @@ public static partial class Serializer
         return value;
     }
 
+    private static object? DeserializeRefInternal(ref Utf8FaunaReader reader, SerializationContext context,
+        Type? targetType = null)
+    {
+        if (targetType != null && targetType != typeof(Ref))
+        {
+            throw new ArgumentException($"Unsupported target type for ref. Must be a ref or undefined, but was {targetType}");
+        }
+
+        var doc = new Ref();
+        while (reader.Read() && reader.CurrentTokenType != TokenType.EndRef)
+        {
+            if (reader.CurrentTokenType == TokenType.FieldName)
+            {
+                var fieldName = reader.GetString()!;
+                reader.Read();
+                switch (fieldName)
+                {
+                    case "id":
+                        doc.Id = DeserializeValueInternal<string>(ref reader, context);
+                        break;
+                    case "coll":
+                        doc.Collection = DeserializeValueInternal<Module>(ref reader, context);
+                        break;
+                }
+            }
+            else
+                throw new SerializationException(
+                    $"Unexpected token while deserializing into Ref: {reader.CurrentTokenType}");
+        }
+
+        return doc;
+    }
+
+    private static object? DeserializeDocumentInternal(ref Utf8FaunaReader reader, SerializationContext context,
+        Type? targetType = null)
+    {
+        if (targetType != null && targetType != typeof(Document))
+        {
+            return DeserializeToClassInternal(ref reader, context, targetType, TokenType.EndDocument);
+        }
+
+        var doc = new Document();
+        while (reader.Read() && reader.CurrentTokenType != TokenType.EndDocument)
+        {
+            if (reader.CurrentTokenType == TokenType.FieldName)
+            {
+                var fieldName = reader.GetString()!;
+                reader.Read();
+                switch (fieldName)
+                {
+                    case "id":
+                        doc.Id = DeserializeValueInternal<string>(ref reader, context);
+                        break;
+                    case "ts":
+                        doc.Ts = DeserializeValueInternal<DateTime>(ref reader, context);
+                        break;
+                    case "coll":
+                        doc.Collection = DeserializeValueInternal<Module>(ref reader, context);
+                        break;
+                    default:
+                        doc[fieldName] = DeserializeValueInternal(ref reader, context);
+                        break;
+                }
+            }
+            else
+                throw new SerializationException(
+                    $"Unexpected token while deserializing into Document: {reader.CurrentTokenType}");
+        }
+
+        return doc;
+    }
+
+    private static object? DeserializeArrayInternal(ref Utf8FaunaReader reader, SerializationContext context, Type? targetType = null)
+    {
+        switch (targetType)
+        {
+            case null:
+            case { IsGenericType: true } when targetType.GetGenericTypeDefinition() == typeof(List<>):
+                return DeserializeArrayToListInternal(ref reader, context, targetType);
+            default:
+                throw new SerializationException(
+                    $"Unsupported target type for array. Must be a List<> or unspecified, but was {targetType}");
+        }
+    }
+
+    private static object? DeserializeArrayToListInternal(ref Utf8FaunaReader reader, SerializationContext context,
+        Type? targetType)
+    {
+        if (targetType == null)
+        {
+            var lst = new List<object?>();
+            while (reader.Read() && reader.CurrentTokenType != TokenType.EndArray)
+            {
+                lst.Add(DeserializeValueInternal(ref reader, context));
+            }
+            return lst;
+        }
+        else
+        {
+            var lst = Activator.CreateInstance(targetType);
+            var elementType = targetType.GetGenericArguments().Single();
+            var add = targetType.GetMethod("Add")!;
+
+            while (reader.Read() && reader.CurrentTokenType != TokenType.EndArray)
+            {
+                var parameters = new[]
+                {
+                    DeserializeValueInternal(ref reader, context, elementType)
+                };
+                add.Invoke(lst, parameters);
+            }
+
+            return lst;
+        }
+    }
+
     private static object? DeserializeObjectInternal(ref Utf8FaunaReader reader, SerializationContext context, Type? targetType = null)
     {
-        return targetType == null || targetType == typeof(object) ? DeserializeDictionaryInternal(ref reader, context) : DeserializeClassInternal(ref reader, context, targetType);
+        switch (targetType)
+        {
+            case null:
+            case { IsGenericType: true } when targetType.GetGenericTypeDefinition() == typeof(Dictionary<,>):
+                return DeserializeObjectToDictionaryInternal(ref reader, context, targetType);
+            default:
+                return DeserializeToClassInternal(ref reader, context, targetType, TokenType.EndObject);
+        }
     }
 
-    private static object? DeserializeClassInternal(ref Utf8FaunaReader reader, SerializationContext context, Type t)
+    private static object? DeserializeToClassInternal(ref Utf8FaunaReader reader, SerializationContext context, Type t, TokenType endToken)
     {
         var fieldMap = context.GetFieldMap(t);
         var instance = Activator.CreateInstance(t);
 
-        while (reader.Read() && reader.CurrentTokenType != TokenType.EndObject)
+        while (reader.Read() && reader.CurrentTokenType != endToken)
         {
             if (reader.CurrentTokenType == TokenType.FieldName)
             {
                 var fieldName = reader.GetString()!;
+                reader.Read();
+
                 if (fieldMap.ContainsKey(fieldName))
                 {
                     fieldMap[fieldName].Info!.SetValue(instance, DeserializeValueInternal(ref reader, context)!);
@@ -93,20 +220,67 @@ public static partial class Serializer
         return instance;
     }
 
-    private static object? DeserializeDictionaryInternal(ref Utf8FaunaReader reader, SerializationContext context)
+    private static object? DeserializeObjectToDictionaryInternal(ref Utf8FaunaReader reader, SerializationContext context, Type? targetType = null)
     {
-        var obj = new Dictionary<string, object?>();
-        while (reader.Read() && reader.CurrentTokenType != TokenType.EndObject)
+        if (targetType == null)
         {
-            if (reader.CurrentTokenType == TokenType.FieldName)
-                obj[reader.GetString()!] = DeserializeValueInternal(ref reader, context)!;
-            else
-                throw new SerializationException(
-                    $"Unexpected token while deserializing into dictionary: {reader.CurrentTokenType}");
+            var obj = new Dictionary<string, object?>();
+
+            while (reader.Read() && reader.CurrentTokenType != TokenType.EndObject)
+            {
+                if (reader.CurrentTokenType == TokenType.FieldName)
+                {
+                    var fieldName = reader.GetString()!;
+                    reader.Read();
+                    obj[fieldName] = DeserializeValueInternal(ref reader, context)!;
+                }
+                else
+                    throw new SerializationException(
+                        $"Unexpected token while deserializing into dictionary: {reader.CurrentTokenType}");
+            }
+
+            return obj;
         }
+        else
+        {
+            var obj = Activator.CreateInstance(targetType);
+            var argTypes = targetType.GetGenericArguments();
+            if (argTypes.Length != 2)
+            {
+                throw new ArgumentException($"Unsupported generic type: {targetType}");
+            }
 
-        return obj;
-    }
+            var keyType = argTypes[0];
+            if (keyType != typeof(string))
+            {
+                throw new ArgumentException(
+                    $"Unsupported Dictionary key type. Key must be of type string, but was a {keyType}");
+            }
 
-}
\ No newline at end of file
+            var valueType = argTypes[1];
+            var add = targetType.GetMethod("Add")!;
+
+            while (reader.Read() && reader.CurrentTokenType != TokenType.EndObject)
+            {
+
+                if (reader.CurrentTokenType == TokenType.FieldName)
+                {
+                    var fieldName = reader.GetString()!;
+                    reader.Read();
+                    var parameters = new[]
+                    {
+                        fieldName,
+                        DeserializeValueInternal(ref reader, context, valueType)
+                    };
+                    add.Invoke(obj, parameters);
+                }
+                else
+                    throw new SerializationException(
+                        $"Unexpected token while deserializing into dictionary: {reader.CurrentTokenType}");
+            }
+
+            return obj;
+        }
+    }
+}
diff --git a/Fauna/Serialization/TokenType.cs b/Fauna/Serialization/TokenType.cs
index 280dfcf1..0e2e5787 100644
--- a/Fauna/Serialization/TokenType.cs
+++ b/Fauna/Serialization/TokenType.cs
@@ -17,10 +17,10 @@ public enum TokenType
     /// The token type is the end of a Fauna array.
     EndArray,
 
-    /// The token type is the start of a Fauna set.
-    StartSet,
-    /// The token type is the end of a Fauna set.
-    EndSet,
+    /// The token type is the start of a Fauna set (a.k.a. page).
+    StartPage,
+    /// The token type is the end of a Fauna set (a.k.a. page).
+    EndPage,
 
     /// The token type is the start of a Fauna ref.
     StartRef,
diff --git a/Fauna/Serialization/Utf8FaunaReader.cs b/Fauna/Serialization/Utf8FaunaReader.cs
index 297adb4e..39e5e57f 100644
--- a/Fauna/Serialization/Utf8FaunaReader.cs
+++ b/Fauna/Serialization/Utf8FaunaReader.cs
@@ -15,7 +15,7 @@ public ref struct Utf8FaunaReader
     private readonly HashSet<TokenType> _closers = new()
     {
         TokenType.EndObject,
-        TokenType.EndSet,
+        TokenType.EndPage,
        TokenType.EndDocument,
        TokenType.EndRef,
        TokenType.EndArray
@@ -50,7 +50,7 @@ public void Skip()
        {
            case TokenType.StartObject:
            case TokenType.StartArray:
-            case TokenType.StartSet:
+            case TokenType.StartPage:
            case TokenType.StartRef:
            case TokenType.StartDocument:
                SkipInternal();
@@ -351,8 +351,8 @@ private void HandleStartObject()
                break;
            case "@set":
                AdvanceTrue();
-                CurrentTokenType = TokenType.StartSet;
-                _tokenStack.Push(TokenType.StartSet);
+                CurrentTokenType = TokenType.StartPage;
+                _tokenStack.Push(TokenType.StartPage);
                break;
            case "@time":
                HandleTaggedString(TokenType.Time);
@@ -383,8 +383,8 @@ private void HandleEndObject()
                CurrentTokenType = TokenType.EndDocument;
                AdvanceTrue();
                break;
-            case TokenType.StartSet:
-                CurrentTokenType = TokenType.EndSet;
+            case TokenType.StartPage:
+                CurrentTokenType = TokenType.EndPage;
                AdvanceTrue();
                break;
            case TokenType.StartRef:
diff --git a/Fauna/Types/Document.cs b/Fauna/Types/Document.cs
new file mode 100644
index 00000000..faf9fd67
--- /dev/null
+++ b/Fauna/Types/Document.cs
@@ -0,0 +1,79 @@
+using System.Collections;
+
+namespace Fauna.Types;
+
+public sealed class Document : Ref, IDictionary<string, object?>
+{
+    private Dictionary<string, object?> _data = new();
+
+    public DateTime Ts { get; set; }
+
+    public ICollection<string> Keys => _data.Keys;
+    public ICollection<object?> Values => _data.Values;
+
+    public IEnumerator<KeyValuePair<string, object?>> GetEnumerator()
+    {
+        return _data.GetEnumerator();
+    }
+
+    IEnumerator IEnumerable.GetEnumerator()
+    {
+        return ((IEnumerable)_data).GetEnumerator();
+    }
+
+    public void Add(KeyValuePair<string, object?> item)
+    {
+        _data.Add(item.Key, item.Value);
+    }
+
+    public void Clear()
+    {
+        _data.Clear();
+    }
+
+    public bool Contains(KeyValuePair<string, object?> item)
+    {
+        return _data.Contains(item);
+    }
+
+    public void CopyTo(KeyValuePair<string, object?>[] array, int arrayIndex)
+    {
+        throw new NotImplementedException();
+    }
+
+    public bool Remove(KeyValuePair<string, object?> item)
+    {
+        throw new NotImplementedException();
+    }
+
+    public int Count => _data.Count;
+
+    public bool IsReadOnly => true;
+
+    public void Add(string key, object? value)
+    {
+        _data.Add(key, value);
+    }
+
+    public bool ContainsKey(string key)
+    {
+        return _data.ContainsKey(key);
+    }
+
+    public bool Remove(string key)
+    {
+        return _data.Remove(key);
+    }
+
+    public bool TryGetValue(string key, out object? value)
+    {
+        throw new NotImplementedException();
+    }
+
+    public object? this[string key]
+    {
+        get => _data[key];
+        set => _data[key] = value;
+    }
+
+}
diff --git a/Fauna/Types/Module.cs b/Fauna/Types/Module.cs
index 26705f4e..2a1a134c 100644
--- a/Fauna/Types/Module.cs
+++ b/Fauna/Types/Module.cs
@@ -1,6 +1,6 @@
 namespace Fauna.Types;
 
-public class Module : IEquatable<Module>
+public sealed class Module : IEquatable<Module>
 {
     public string Name { get; }
 
diff --git a/Fauna/Types/Ref.cs b/Fauna/Types/Ref.cs
new file mode 100644
index 00000000..64f3989d
--- /dev/null
+++ b/Fauna/Types/Ref.cs
@@ -0,0 +1,7 @@
+namespace Fauna.Types;
+
+public class Ref
+{
+    public string Id { get; set; }
+    public Module Collection { get; set; }
+}

From a0b4f328bd661d57d92f81835eecacde4bbf8ae5 Mon Sep 17 00:00:00 2001
From: Lucas Pedroza
Date: Fri, 15 Dec 2023 17:49:33 +0100
Subject: [PATCH 2/2] Fix bug when deserializing doc to class

---
 .../Serialization/Serializer.Classes.Tests.cs |  7 +++++++
 .../Serializer.Deserialize.Tests.cs           | 17 +++++++++++++++++
 Fauna/Serialization/Serializer.Deserialize.cs |  1 -
 3 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/Fauna.Test/Serialization/Serializer.Classes.Tests.cs b/Fauna.Test/Serialization/Serializer.Classes.Tests.cs
index 586c928c..3a46dfe7 100644
--- a/Fauna.Test/Serialization/Serializer.Classes.Tests.cs
+++ b/Fauna.Test/Serialization/Serializer.Classes.Tests.cs
@@ -12,6 +12,12 @@ private class Person
         public int Age { get; set; } = 61;
     }
 
+    [FaunaObject]
+    private class ClassForDocument
+    {
+        [Field("user_field")] public string? UserField { get; set; }
+    }
+
     [FaunaObject]
     private class PersonWithAttributes
     {
@@ -21,6 +27,7 @@ private class PersonWithAttributes
         public string? Ignored { get; set; }
     }
 
+
     [FaunaObject]
     private class ClassWithInvalidPropertyTypeHint
     {
diff --git a/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs b/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs
index b3868c83..35ff5ba8 100644
--- a/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs
+++ b/Fauna.Test/Serialization/Serializer.Deserialize.Tests.cs
@@ -87,6 +87,23 @@ public void DeserializeDocument()
         Assert.AreEqual("user_value", actual["user_field"]);
     }
 
+    [Test]
+    public void DeserializeDocumentToClass()
+    {
+        const string given = @"
+        {
+            ""@doc"":{
+                ""id"":""123"",
+                ""coll"":{""@mod"":""MyColl""},
+                ""ts"":{""@time"":""2023-12-15T01:01:01.0010010Z""},
+                ""user_field"":""user_value""
+            }
+        }";
+
+        var actual = Serializer.Deserialize<ClassForDocument>(given);
+        Assert.AreEqual("user_value", actual.UserField);
+    }
+
     [Test]
     public void DeserializeRef()
     {
diff --git a/Fauna/Serialization/Serializer.Deserialize.cs b/Fauna/Serialization/Serializer.Deserialize.cs
index e49c87ef..e4e439eb 100644
--- a/Fauna/Serialization/Serializer.Deserialize.cs
+++ b/Fauna/Serialization/Serializer.Deserialize.cs
@@ -207,7 +207,6 @@ private static T DeserializeValueInternal<T>(ref Utf8FaunaReader reader, Seriali
             }
             else
             {
-                reader.Read();
                 reader.Skip();
             }
         }