I'm using Dapper to read data from SQL Server. I have a SQL statement that returns a long JSON result, but the result is split into 3 rows of at most 2033 characters each, so Dapper can't parse it because each individual row is invalid JSON.
How can I prevent this splitting, or make Dapper handle it?
This is my code:
// Register a handler so Dapper materializes the nested JSON column as a List<Product>.
SqlMapper.ResetTypeHandlers();
SqlMapper.AddTypeHandler(new JsonTypeHandler<List<Product>>());

// FOR JSON PATH makes SQL Server return the result set as a JSON document;
// note the server streams long documents as multiple rows of ~2033 characters.
const string sql = @"SELECT
*,
(SELECT * FROM Balance b
WHERE p.SKU = b.SKU
FOR JSON PATH) AS [Balances]
FROM Product p
WHERE SKU IN @SKUs
FOR JSON PATH";

// NOTE(review): this connection is never disposed — consider a using/await-using
// once the returned task is awaited by the caller.
var connection = new SqlConnection("myconnection");
// Fixed: the original line had an extra closing brace (`new{...}} });`) and did not compile.
return connection.QuerySingleAsync<List<Product>>(sql, new { SKUs = new[] { "foo", "bar" } });
And the code of TypeHandler:
/// <summary>
/// Dapper type handler that round-trips values of type <typeparamref name="T"/>
/// through JSON using Json.NET.
/// </summary>
public class JsonTypeHandler<T> : SqlMapper.TypeHandler<T>
{
    /// <summary>Deserializes the database value (via its string form) into a <typeparamref name="T"/>.</summary>
    public override T Parse(object value) =>
        JsonConvert.DeserializeObject<T>(value.ToString());

    /// <summary>Serializes <paramref name="value"/> to JSON and assigns it to the outgoing parameter.</summary>
    public override void SetValue(IDbDataParameter parameter, T value) =>
        parameter.Value = JsonConvert.SerializeObject(value);
}
And here is how I run this SQL in DataGrip. Edit: here is the error message:
Newtonsoft.Json.JsonSerializationException : Unexpected end when deserializing object. Path '[0].Balances[4].WarehouseId', line 1, position 2033.
My solution is to write another extension method that wraps the Query&lt;string&gt;
method, as shown below:
/// <summary>
/// Executes <paramref name="sql"/> (expected to produce a FOR JSON result), reassembles the
/// JSON fragments that SQL Server splits across multiple ~2033-character rows, and
/// deserializes the complete document into <typeparamref name="T"/>.
/// </summary>
/// <param name="cnn">Open database connection to query against.</param>
/// <param name="sql">The SQL to execute; should end with FOR JSON so each row is a JSON fragment.</param>
/// <param name="param">Optional Dapper parameter object.</param>
/// <returns>The deserialized object, or <c>default(T)</c> when the query returns no rows.</returns>
public static T QueryJson<T>(this IDbConnection cnn, string sql, object param = null,
    IDbTransaction transaction = null, bool buffered = true, int? commandTimeout = null,
    CommandType? commandType = null) where T : class
{
    var result = cnn.Query<string>(sql, param, transaction, buffered, commandTimeout, commandType).ToList();
    // List is already materialized: Count is cheaper and clearer than Any().
    if (result.Count == 0)
        return default(T);

    // SQL Server streams large FOR JSON payloads as several row fragments;
    // string.Concat joins them back into one valid JSON document.
    var json = string.Concat(result);

    var settings = new JsonSerializerSettings
    {
        // https://github.com/danielwertheim/jsonnet-contractresolvers
        // I use this Contract Resolver to set data to private setter properties
        ContractResolver = new PrivateSetterContractResolver()
    };
    // Using Json.Net to de-serialize objects
    return JsonConvert.DeserializeObject<T>(json, settings);
}
This solution works quite well, but it is slower than the multiple-mapping method when querying large data sets (1000 objects took 2.7 seconds compared to 1.3 seconds).
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With