diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs b/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs index 5c07d8483c..419e0319a3 100644 --- a/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs +++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs @@ -68,7 +68,7 @@ public HiveServer2Reader( HiveServer2Statement statement, Schema schema, DataTypeConversion dataTypeConversion, - CancellationToken cancellationToken = default) + CancellationToken _ = default) { _statement = statement; Schema = schema; @@ -88,22 +88,20 @@ public HiveServer2Reader( // Await the fetch response TFetchResultsResp response = await FetchNext(_statement, cancellationToken); - // Build the current batch - RecordBatch result = CreateBatch(response, out int fetchedRows); - - if ((_statement.BatchSize > 0 && fetchedRows < _statement.BatchSize) || fetchedRows == 0) + int columnCount = GetColumnCount(response); + int rowCount = GetRowCount(response, columnCount); + if ((_statement.BatchSize > 0 && rowCount < _statement.BatchSize) || rowCount == 0) { // This is the last batch _statement = null; } - // Return the current batch. - return result; + // Build the current batch, if any data exists + return rowCount > 0 ? CreateBatch(response, columnCount, rowCount) : null; } - private RecordBatch CreateBatch(TFetchResultsResp response, out int length) + private RecordBatch CreateBatch(TFetchResultsResp response, int columnCount, int rowCount) { - int columnCount = response.Results.Columns.Count; IList columnData = []; bool shouldConvertScalar = _dataTypeConversion.HasFlag(DataTypeConversion.Scalar); for (int i = 0; i < columnCount; i++) @@ -113,10 +111,15 @@ private RecordBatch CreateBatch(TFetchResultsResp response, out int length) columnData.Add(columnArray); } - length = columnCount > 0 ?
GetArray(response.Results.Columns[0]).Length : 0; - return new RecordBatch(Schema, columnData, length); + return new RecordBatch(Schema, columnData, rowCount); } + private static int GetColumnCount(TFetchResultsResp response) => + response.Results.Columns.Count; + + private static int GetRowCount(TFetchResultsResp response, int columnCount) => + columnCount > 0 ? GetArray(response.Results.Columns[0]).Length : 0; + private static async Task FetchNext(HiveServer2Statement statement, CancellationToken cancellationToken = default) { var request = new TFetchResultsReq(statement.OperationHandle, TFetchOrientation.FETCH_NEXT, statement.BatchSize); diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs b/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs index d5a3b117cb..d35ebdd101 100644 --- a/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs +++ b/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs @@ -72,20 +72,22 @@ public static void CanClientExecuteUpdate(Adbc.Client.AdbcConnection adbcConnect /// /// The to use. /// The to use - public static void CanClientGetSchema(Adbc.Client.AdbcConnection adbcConnection, TestConfiguration testConfiguration) + /// The custom query to use instead of query from "/> + /// The custom column count to use instead of query from + public static void CanClientGetSchema(Adbc.Client.AdbcConnection adbcConnection, TestConfiguration testConfiguration, string? customQuery = default, int? expectedColumnCount = default) { if (adbcConnection == null) throw new ArgumentNullException(nameof(adbcConnection)); if (testConfiguration == null) throw new ArgumentNullException(nameof(testConfiguration)); adbcConnection.Open(); - using AdbcCommand adbcCommand = new AdbcCommand(testConfiguration.Query, adbcConnection); + using AdbcCommand adbcCommand = new AdbcCommand(customQuery ?? testConfiguration.Query, adbcConnection); using AdbcDataReader reader = adbcCommand.ExecuteReader(CommandBehavior.SchemaOnly); DataTable? 
table = reader.GetSchemaTable(); // there is one row per field - Assert.Equal(testConfiguration.Metadata.ExpectedColumnCount, table?.Rows.Count); + Assert.Equal(expectedColumnCount ?? testConfiguration.Metadata.ExpectedColumnCount, table?.Rows.Count); } /// @@ -98,7 +100,9 @@ public static void CanClientGetSchema(Adbc.Client.AdbcConnection adbcConnection, public static void CanClientExecuteQuery( Adbc.Client.AdbcConnection adbcConnection, TestConfiguration testConfiguration, - Action? additionalCommandOptionsSetter = null) + Action? additionalCommandOptionsSetter = null, + string? customQuery = default, + int? expectedResultsCount = default) { if (adbcConnection == null) throw new ArgumentNullException(nameof(adbcConnection)); if (testConfiguration == null) throw new ArgumentNullException(nameof(testConfiguration)); @@ -107,7 +111,7 @@ public static void CanClientExecuteQuery( adbcConnection.Open(); - using AdbcCommand adbcCommand = new AdbcCommand(testConfiguration.Query, adbcConnection); + using AdbcCommand adbcCommand = new AdbcCommand(customQuery ?? testConfiguration.Query, adbcConnection); additionalCommandOptionsSetter?.Invoke(adbcCommand); using AdbcDataReader reader = adbcCommand.ExecuteReader(); @@ -131,7 +135,7 @@ public static void CanClientExecuteQuery( } finally { reader.Close(); } - Assert.Equal(testConfiguration.ExpectedResultsCount, count); + Assert.Equal(expectedResultsCount ?? testConfiguration.ExpectedResultsCount, count); } /// diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs b/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs index d4662c504f..46f52584a8 100644 --- a/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs +++ b/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs @@ -170,6 +170,11 @@ protected string[] GetQueries() return queries; } + protected SampleDataBuilder GetSampleDataBuilder() + { + return TestEnvironment.GetSampleDataBuilder(); + } + /// /// Gets a the Spark ADBC driver with settings from the . 
/// diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs b/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs index ad805dee24..374c102f37 100644 --- a/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs +++ b/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs @@ -53,6 +53,8 @@ protected TestEnvironment(Func getConnection) public abstract AdbcDriver CreateNewDriver(); + public abstract SampleDataBuilder GetSampleDataBuilder(); + public abstract Dictionary GetDriverParameters(TConfig testConfiguration); public virtual string GetCreateTemporaryTableStatement(string tableName, string columns) diff --git a/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs b/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs index 57efaf380b..9408c0d6b5 100644 --- a/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs +++ b/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs @@ -73,5 +73,6 @@ public override Dictionary GetDriverParameters(ApacheTestConfigu public override string GetInsertStatement(string tableName, string columnName, string? value) => string.Format("INSERT INTO {0} ({1}) SELECT {2};", tableName, columnName, value ?? 
"NULL"); + public override SampleDataBuilder GetSampleDataBuilder() => throw new NotImplementedException(); } } diff --git a/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs b/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs index 5d2c5f2bf7..403c4ac017 100644 --- a/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs +++ b/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs @@ -98,6 +98,9 @@ await ValidateInsertSelectDeleteTwoValuesAsync( [InlineData("CAST(NULL AS CHAR(10))")] [InlineData("CAST(NULL AS BOOLEAN)")] [InlineData("CAST(NULL AS BINARY)")] + [InlineData("CAST(NULL AS MAP)")] + [InlineData("CAST(NULL AS STRUCT)")] + [InlineData("CAST(NULL AS ARRAY)")] public async Task TestNullData(string projectionClause) { string selectStatement = $"SELECT {projectionClause};"; diff --git a/csharp/test/Drivers/Apache/Spark/ClientTests.cs b/csharp/test/Drivers/Apache/Spark/ClientTests.cs new file mode 100644 index 0000000000..5802f82b23 --- /dev/null +++ b/csharp/test/Drivers/Apache/Spark/ClientTests.cs @@ -0,0 +1,227 @@ +/* +* Licensed to the Apache Software Foundation (ASF) under one or more +* contributor license agreements. See the NOTICE file distributed with +* this work for additional information regarding copyright ownership. +* The ASF licenses this file to You under the Apache License, Version 2.0 +* (the "License"); you may not use this file except in compliance with +* the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +using System; +using System.Collections.Generic; +using Apache.Arrow.Adbc.Drivers.Apache.Spark; +using Apache.Arrow.Adbc.Tests.Xunit; +using Xunit; +using Xunit.Abstractions; + +namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark +{ + /// + /// Class for testing the ADBC Client using the Spark ADBC driver. + /// + /// + /// Tests are ordered to ensure data is created for the other + /// queries to run. + /// + [TestCaseOrderer("Apache.Arrow.Adbc.Tests.Xunit.TestOrderer", "Apache.Arrow.Adbc.Tests")] + public class ClientTests : TestBase + { + public ClientTests(ITestOutputHelper? outputHelper) : base(outputHelper, new SparkTestEnvironment.Factory()) + { + Skip.IfNot(Utils.CanExecuteTestConfig(TestConfigVariable)); + } + + /// + /// Validates if the client can execute updates. + /// + [SkippableFact, Order(1)] + public void CanClientExecuteUpdate() + { + using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection()) + { + adbcConnection.Open(); + + string[] queries = GetQueries(); + int affectedRows = ValidateAffectedRows ? 1 : -1; + + List expectedResults = TestEnvironment.ServerType != SparkServerType.Databricks + ? [ + -1, // DROP TABLE + -1, // CREATE TABLE + affectedRows, // INSERT + affectedRows, // INSERT + affectedRows, // INSERT + //1, // UPDATE + //1, // DELETE + ] + : [ + -1, // DROP TABLE + -1, // CREATE TABLE + affectedRows, // INSERT + affectedRows, // INSERT + affectedRows, // INSERT + affectedRows, // UPDATE + affectedRows, // DELETE + ]; + + + Tests.ClientTests.CanClientExecuteUpdate(adbcConnection, TestConfiguration, queries, expectedResults); + } + } + + /// + /// Validates if the client can get the schema.
+ /// + [SkippableFact, Order(2)] + public void CanClientGetSchema() + { + using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection()) + { + Tests.ClientTests.CanClientGetSchema(adbcConnection, TestConfiguration, $"SELECT * FROM {TestConfiguration.Metadata.Table}"); + } + } + + /// + /// Validates if the client can connect to a live server and + /// parse the results. + /// + [SkippableFact, Order(3)] + public void CanClientExecuteQuery() + { + using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection()) + { + Tests.ClientTests.CanClientExecuteQuery(adbcConnection, TestConfiguration); + } + } + + /// + /// Validates if the client can connect to a live server and + /// parse the results. + /// + [SkippableFact, Order(5)] + public void CanClientExecuteEmptyQuery() + { + using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection()) + { + Tests.ClientTests.CanClientExecuteQuery( + adbcConnection, + TestConfiguration, + customQuery: $"SELECT * FROM {TestConfiguration.Metadata.Table} WHERE FALSE", + expectedResultsCount: 0); + } + } + + /// + /// Validates if the client is retrieving and converting values + /// to the expected types. 
+ /// + [SkippableFact, Order(4)] + public void VerifyTypesAndValues() + { + using (Adbc.Client.AdbcConnection dbConnection = GetAdbcConnection()) + { + SampleDataBuilder sampleDataBuilder = GetSampleDataBuilder(); + + Tests.ClientTests.VerifyTypesAndValues(dbConnection, sampleDataBuilder); + } + } + + [SkippableFact] + public void VerifySchemaTablesWithNoConstraints() + { + using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection(includeTableConstraints: false)) + { + adbcConnection.Open(); + + string schema = "Tables"; + + var tables = adbcConnection.GetSchema(schema); + + Assert.True(tables.Rows.Count > 0, $"No tables were found in the schema '{schema}'"); + } + } + + + [SkippableFact] + public void VerifySchemaTables() + { + using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection()) + { + adbcConnection.Open(); + + var collections = adbcConnection.GetSchema("MetaDataCollections"); + Assert.Equal(7, collections.Rows.Count); + Assert.Equal(2, collections.Columns.Count); + + var restrictions = adbcConnection.GetSchema("Restrictions"); + Assert.Equal(11, restrictions.Rows.Count); + Assert.Equal(3, restrictions.Columns.Count); + + var catalogs = adbcConnection.GetSchema("Catalogs"); + Assert.Single(catalogs.Columns); + var catalog = (string?)catalogs.Rows[0].ItemArray[0]; + + catalogs = adbcConnection.GetSchema("Catalogs", new[] { catalog }); + Assert.Equal(1, catalogs.Rows.Count); + + string random = "X" + Guid.NewGuid().ToString("N"); + + catalogs = adbcConnection.GetSchema("Catalogs", new[] { random }); + Assert.Equal(0, catalogs.Rows.Count); + + var schemas = adbcConnection.GetSchema("Schemas", new[] { catalog }); + Assert.Equal(2, schemas.Columns.Count); + var schema = (string?)schemas.Rows[0].ItemArray[1]; + + schemas = adbcConnection.GetSchema("Schemas", new[] { catalog, schema }); + Assert.Equal(1, schemas.Rows.Count); + + schemas = adbcConnection.GetSchema("Schemas", new[] { random }); + Assert.Equal(0, schemas.Rows.Count); + + 
schemas = adbcConnection.GetSchema("Schemas", new[] { catalog, random }); + Assert.Equal(0, schemas.Rows.Count); + + schemas = adbcConnection.GetSchema("Schemas", new[] { random, random }); + Assert.Equal(0, schemas.Rows.Count); + + var tableTypes = adbcConnection.GetSchema("TableTypes"); + Assert.Single(tableTypes.Columns); + + var tables = adbcConnection.GetSchema("Tables", new[] { catalog, schema }); + Assert.Equal(4, tables.Columns.Count); + + tables = adbcConnection.GetSchema("Tables", new[] { catalog, random }); + Assert.Equal(0, tables.Rows.Count); + + tables = adbcConnection.GetSchema("Tables", new[] { random, schema }); + Assert.Equal(0, tables.Rows.Count); + + tables = adbcConnection.GetSchema("Tables", new[] { random, random }); + Assert.Equal(0, tables.Rows.Count); + + tables = adbcConnection.GetSchema("Tables", new[] { catalog, schema, random }); + Assert.Equal(0, tables.Rows.Count); + + var columns = adbcConnection.GetSchema("Columns", new[] { catalog, schema }); + Assert.Equal(16, columns.Columns.Count); + } + } + + private Adbc.Client.AdbcConnection GetAdbcConnection(bool includeTableConstraints = true) + { + return new Adbc.Client.AdbcConnection( + NewDriver, GetDriverParameters(TestConfiguration), + [] + ); + } + } +} diff --git a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs index 1b79facf4f..247d6d00bf 100644 --- a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs +++ b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs @@ -17,8 +17,11 @@ using System; using System.Collections.Generic; +using System.Data.SqlTypes; +using System.Text; using Apache.Arrow.Adbc.Drivers.Apache.Hive2; using Apache.Arrow.Adbc.Drivers.Apache.Spark; +using Apache.Arrow.Types; namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark { @@ -26,7 +29,7 @@ public class SparkTestEnvironment : TestEnvironment { public class Factory : Factory { - public override SparkTestEnvironment Create(Func 
getConnection) => new SparkTestEnvironment(getConnection); + public override SparkTestEnvironment Create(Func getConnection) => new(getConnection); } private SparkTestEnvironment(Func getConnection) : base(getConnection) { } @@ -132,5 +135,151 @@ public override Dictionary GetDriverParameters(SparkTestConfigur public override string GetInsertStatement(string tableName, string columnName, string? value) => string.Format("INSERT INTO {0} ({1}) SELECT {2};", tableName, columnName, value ?? "NULL"); + + public override SampleDataBuilder GetSampleDataBuilder() + { + SampleDataBuilder sampleDataBuilder = new(); + + // standard values + sampleDataBuilder.Samples.Add( + new SampleData() + { + Query = "SELECT " + + "CAST(1 as BIGINT) as id, " + + "CAST(2 as INTEGER) as int, " + + "CAST(1.23 as FLOAT) as number_float, " + + "CAST(4.56 as DOUBLE) as number_double, " + + "4.56BD as decimal, " + + "9.9999999999999999999999999999999999999BD as big_decimal, " + + "CAST(True as BOOLEAN) as is_active, " + + "'John Doe' as name, " + + "X'616263313233' as data, " + + "DATE '2023-09-08' as date, " + + "TIMESTAMP '2023-09-08 12:34:56+00:00' as timestamp, " + + "INTERVAL 178956969 YEAR 11 MONTH as interval, " + + "ARRAY(1, 2, 3) as numbers, " + + "STRUCT('John Doe' as name, 30 as age) as person," + + "MAP('name', CAST('Jane Doe' AS STRING), 'age', CAST(29 AS INT)) as map", + ExpectedValues = + [ + new("id", typeof(long), typeof(Int64Type), 1L), + new("int", typeof(int), typeof(Int32Type), 2), + new("number_float", typeof(double), typeof(DoubleType), 1.23d), + new("number_double", typeof(double), typeof(DoubleType), 4.56d), + new("decimal", typeof(SqlDecimal), typeof(Decimal128Type), SqlDecimal.Parse("4.56")), + new("big_decimal", typeof(SqlDecimal), typeof(Decimal128Type), SqlDecimal.Parse("9.9999999999999999999999999999999999999")), + new("is_active", typeof(bool), typeof(BooleanType), true), + new("name", typeof(string), typeof(StringType), "John Doe"), + new("data", typeof(byte[]), 
typeof(BinaryType), UTF8Encoding.UTF8.GetBytes("abc123")), + new("date", typeof(DateTime), typeof(Date32Type), new DateTime(2023, 9, 8)), + new("timestamp", typeof(DateTimeOffset), typeof(TimestampType), new DateTimeOffset(new DateTime(2023, 9, 8, 12, 34, 56), TimeSpan.Zero)), + new("interval", typeof(string), typeof(StringType), "178956969-11"), + new("numbers", typeof(string), typeof(StringType), "[1,2,3]"), + new("person", typeof(string), typeof(StringType), """{"name":"John Doe","age":30}"""), + new("map", typeof(string), typeof(StringType), """{"age":"29","name":"Jane Doe"}""") // This is unexpected JSON. Expecting 29 to be a numeric and not string. + ] + }); + + sampleDataBuilder.Samples.Add( + new SampleData() + { + Query = "SELECT " + + "CAST(NULL as BIGINT) as id, " + + "CAST(NULL as INTEGER) as int, " + + "CAST(NULL as FLOAT) as number_float, " + + "CAST(NULL as DOUBLE) as number_double, " + + "CAST(NULL as DECIMAL(38,2)) as decimal, " + + "CAST(NULL as BOOLEAN) as is_active, " + + "CAST(NULL as STRING) as name, " + + "CAST(NULL as BINARY) as data, " + + "CAST(NULL as DATE) as date, " + + "CAST(NULL as TIMESTAMP) as timestamp," + + "CAST(NULL as MAP) as map, " + + "CAST(NULL as ARRAY) as numbers, " + + "CAST(NULL as STRUCT) as person, " + + "MAP(CAST('EMPTY' as STRING), CAST(NULL as INTEGER)) as map_null, " + + "ARRAY(NULL,NULL,NULL) as numbers_null, " + + "STRUCT(CAST(NULL as STRING), CAST(NULL as INTEGER)) as person_null", + //"CAST(NULL as STRUCT) as struct, " + + //"STRUCT(CAST(NULL as STRING) as name, CAST(NULL as BIGINT) as age) as person", + ExpectedValues = + [ + new("id", typeof(long), typeof(Int64Type), null), + new("int", typeof(int), typeof(Int32Type), null), + new("number_float", typeof(double), typeof(DoubleType), null), + new("number_double", typeof(double), typeof(DoubleType), null), + new("decimal", typeof(SqlDecimal), typeof(Decimal128Type), null), + new("is_active", typeof(bool), typeof(BooleanType), null), + new("name", typeof(string), 
typeof(StringType), null), + new("data", typeof(byte[]), typeof(BinaryType), null), + new("date", typeof(DateTime), typeof(Date32Type), null), + new("timestamp", typeof(DateTimeOffset), typeof(TimestampType), null), + new("map", typeof(string), typeof(StringType), null), + new("numbers", typeof(string), typeof(StringType), null), + new("person", typeof(string), typeof(StringType), null), + new("map_null", typeof(string), typeof(StringType), """{"EMPTY":null}"""), + new("numbers_null", typeof(string), typeof(StringType), """[null,null,null]"""), + new("person_null", typeof(string), typeof(StringType), """{"col1":null,"col2":null}"""), + ] + }); + + // complex struct + sampleDataBuilder.Samples.Add( + new SampleData() + { + Query = "SELECT " + + "STRUCT(" + + "\"Iron Man\" as name," + + "\"Avengers\" as team," + + "ARRAY(\"Genius\", \"Billionaire\", \"Playboy\", \"Philanthropist\") as powers," + + "ARRAY(" + + " STRUCT(" + + " \"Captain America\" as name, " + + " \"Avengers\" as team, " + + " ARRAY(\"Super Soldier Serum\", \"Vibranium Shield\") as powers, " + + " ARRAY(" + + " STRUCT(" + + " \"Thanos\" as name, " + + " \"Black Order\" as team, " + + " ARRAY(\"Infinity Gauntlet\", \"Super Strength\", \"Teleportation\") as powers, " + + " ARRAY(" + + " STRUCT(" + + " \"Loki\" as name, " + + " \"Asgard\" as team, " + + " ARRAY(\"Magic\", \"Shapeshifting\", \"Trickery\") as powers " + + " )" + + " ) as allies" + + " )" + + " ) as enemies" + + " )," + + " STRUCT(" + + " \"Spider-Man\" as name, " + + " \"Avengers\" as team, " + + " ARRAY(\"Spider-Sense\", \"Web-Shooting\", \"Wall-Crawling\") as powers, " + + " ARRAY(" + + " STRUCT(" + + " \"Green Goblin\" as name, " + + " \"Sinister Six\" as team, " + + " ARRAY(\"Glider\", \"Pumpkin Bombs\", \"Super Strength\") as powers, " + + " ARRAY(" + + " STRUCT(" + + " \"Doctor Octopus\" as name, " + + " \"Sinister Six\" as team, " + + " ARRAY(\"Mechanical Arms\", \"Genius\", \"Madness\") as powers " + + " )" + + " ) as allies" + + " 
)" + + " ) as enemies" + + " )" + + " ) as friends" + + ") as iron_man", + ExpectedValues = + [ + new("iron_man", typeof(string), typeof(StringType), "{\"name\":\"Iron Man\",\"team\":\"Avengers\",\"powers\":[\"Genius\",\"Billionaire\",\"Playboy\",\"Philanthropist\"],\"friends\":[{\"name\":\"Captain America\",\"team\":\"Avengers\",\"powers\":[\"Super Soldier Serum\",\"Vibranium Shield\"],\"enemies\":[{\"name\":\"Thanos\",\"team\":\"Black Order\",\"powers\":[\"Infinity Gauntlet\",\"Super Strength\",\"Teleportation\"],\"allies\":[{\"name\":\"Loki\",\"team\":\"Asgard\",\"powers\":[\"Magic\",\"Shapeshifting\",\"Trickery\"]}]}]},{\"name\":\"Spider-Man\",\"team\":\"Avengers\",\"powers\":[\"Spider-Sense\",\"Web-Shooting\",\"Wall-Crawling\"],\"enemies\":[{\"name\":\"Green Goblin\",\"team\":\"Sinister Six\",\"powers\":[\"Glider\",\"Pumpkin Bombs\",\"Super Strength\"],\"allies\":[{\"name\":\"Doctor Octopus\",\"team\":\"Sinister Six\",\"powers\":[\"Mechanical Arms\",\"Genius\",\"Madness\"]}]}]}]}") + ] + }); + + return sampleDataBuilder; + } } }