Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 0 additions & 17 deletions Drivers/DbDriver.cs
Original file line number Diff line number Diff line change
Expand Up @@ -57,23 +57,6 @@ public abstract class DbDriver

protected abstract Dictionary<string, ColumnMapping> ColumnMappings { get; }

protected const string JsonElementTypeHandler = """
private class JsonElementTypeHandler : SqlMapper.TypeHandler<JsonElement>
{
public override JsonElement Parse(object value)
{
if (value is string s)
return JsonDocument.Parse(s).RootElement;
throw new DataException($"Cannot convert {value?.GetType()} to JsonElement");
}

public override void SetValue(IDbDataParameter parameter, JsonElement value)
{
parameter.Value = value.GetRawText();
}
}
""";

protected const string TransformQueryForSliceArgsImpl = """
public static string TransformQueryForSliceArgs(string originalSql, int sliceSize, string paramName)
{
Expand Down
17 changes: 17 additions & 0 deletions Drivers/MySqlConnectorDriver.cs
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,23 @@ public sealed partial class MySqlConnectorDriver(

public override string TransactionClassName => "MySqlTransaction";

private const string JsonElementTypeHandler = """
private class JsonElementTypeHandler : SqlMapper.TypeHandler<JsonElement>
{
public override JsonElement Parse(object value)
{
if (value is string s)
return JsonDocument.Parse(s).RootElement;
throw new DataException($"Cannot convert {value?.GetType()} to JsonElement");
}

public override void SetValue(IDbDataParameter parameter, JsonElement value)
{
parameter.Value = value.GetRawText();
}
}
""";

public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface,
string returnInterface, Query query)
{
Expand Down
25 changes: 21 additions & 4 deletions Drivers/NpgsqlDriver.cs
Original file line number Diff line number Diff line change
Expand Up @@ -143,16 +143,16 @@ public NpgsqlDriver(
["JsonElement"] = new(
new()
{
{ "json", new() },
{ "jsonb", new() }
{ "json", new(NpgsqlTypeOverride: "NpgsqlDbType.Json") },
{ "jsonb", new(NpgsqlTypeOverride: "NpgsqlDbType.Jsonb") }
},
readerFn: ordinal => $"JsonSerializer.Deserialize<JsonElement>(reader.GetString({ordinal}))",
writerFn: (el, notNull, isDapper) =>
{
if (notNull)
return $"{el}.GetRawText()";
return $"{el}";
var nullValue = isDapper ? "null" : "(object)DBNull.Value";
return $"{el}.HasValue ? {el}.Value.GetRawText() : {nullValue}";
return $"{el}.HasValue ? (object) {el}.Value : {nullValue}";
},
usingDirective: "System.Text.Json",
sqlMapper: "SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler());",
Expand Down Expand Up @@ -345,6 +345,23 @@ public NpgsqlDriver(

public override string TransactionClassName => "NpgsqlTransaction";

private const string JsonElementTypeHandler = """
private class JsonElementTypeHandler : SqlMapper.TypeHandler<JsonElement>
{
public override JsonElement Parse(object value)
{
if (value is string s)
return JsonDocument.Parse(s).RootElement;
throw new DataException($"Cannot convert {value?.GetType()} to JsonElement");
}

public override void SetValue(IDbDataParameter parameter, JsonElement value)
{
parameter.Value = value;
}
}
""";

private const string XmlDocumentTypeHandler =
"""
private class XmlDocumentTypeHandler : SqlMapper.TypeHandler<XmlDocument>
Expand Down
4 changes: 2 additions & 2 deletions docs/04_Postgres.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ we consider support for the different data types separately for batch inserts an
| tsvector | ✅ | ❌ |
| tsquery | ✅ | ❌ |
| uuid | ✅ | ✅ |
| json | ✅ | ⚠️ |
| jsonb | ✅ | ⚠️ |
| json | ✅ | ✅ |
| jsonb | ✅ | ✅ |
| jsonpath | ✅ | ⚠️ |
| xml | ✅ | ⚠️ |
| enum | ✅ | ⚠️ |
Expand Down
2 changes: 2 additions & 0 deletions end2end/EndToEndScaffold/Config.cs
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ public enum KnownTestType
PostgresFloatingPointCopyFrom,
PostgresDateTimeCopyFrom,
PostgresGuidCopyFrom,
PostgresJsonCopyFrom,
PostgresNetworkCopyFrom,
PostgresArrayCopyFrom,
PostgresGeoCopyFrom,
Expand Down Expand Up @@ -179,6 +180,7 @@ internal static class Config
KnownTestType.PostgresFloatingPointCopyFrom,
KnownTestType.PostgresDateTimeCopyFrom,
KnownTestType.PostgresGuidCopyFrom,
KnownTestType.PostgresJsonCopyFrom,
KnownTestType.PostgresArrayCopyFrom,
KnownTestType.PostgresNetworkCopyFrom
];
Expand Down
47 changes: 47 additions & 0 deletions end2end/EndToEndScaffold/Templates/PostgresTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -509,6 +509,53 @@ void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPos
}
"""
},
[KnownTestType.PostgresJsonCopyFrom] = new TestImpl
{
Impl = $$"""
[Test]
[TestCase(100, "{\"song\": \"Pinball Wizard\", \"album\": \"Tommy\", \"artist\": \"The Who\"}")]
[TestCase(10, null)]
public async Task TestPostgresJsonCopyFrom(int batchSize, string cJson)
{
JsonElement? cParsedJson = null;
if (cJson != null)
cParsedJson = JsonDocument.Parse(cJson).RootElement;

var batchArgs = Enumerable.Range(0, batchSize)
.Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs
{
CJson = cParsedJson,
CJsonb = cParsedJson
})
.ToList();
await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs);

var expected = new QuerySql.GetPostgresSpecialTypesCntRow
{
Cnt = batchSize,
CJson = cParsedJson,
CJsonb = cParsedJson
};
var actual = await QuerySql.GetPostgresSpecialTypesCnt();
AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}});

void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y)
{
var options = new JsonSerializerOptions
{
WriteIndented = false
};
Assert.That(y.Cnt, Is.EqualTo(x.Cnt));
Assert.That(y.CJson.HasValue, Is.EqualTo(x.CJson.HasValue));
if (y.CJson.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJson.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJson.Value, options)));
Assert.That(y.CJsonb.HasValue, Is.EqualTo(x.CJsonb.HasValue));
if (y.CJsonb.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJsonb.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJsonb.Value, options)));
}
}
"""
},
[KnownTestType.PostgresInvalidJson] = new TestImpl
{
Impl = $$"""
Expand Down
34 changes: 34 additions & 0 deletions end2end/EndToEndTests/NpgsqlDapperTester.generated.cs
Original file line number Diff line number Diff line change
Expand Up @@ -892,6 +892,40 @@ void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.Get
}
}

[Test]
[TestCase(100, "{\"song\": \"Pinball Wizard\", \"album\": \"Tommy\", \"artist\": \"The Who\"}")]
[TestCase(10, null)]
public async Task TestPostgresJsonCopyFrom(int batchSize, string cJson)
{
JsonElement? cParsedJson = null;
if (cJson != null)
cParsedJson = JsonDocument.Parse(cJson).RootElement;
var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CJson = cParsedJson, CJsonb = cParsedJson }).ToList();
await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs);
var expected = new QuerySql.GetPostgresSpecialTypesCntRow
{
Cnt = batchSize,
CJson = cParsedJson,
CJsonb = cParsedJson
};
var actual = await QuerySql.GetPostgresSpecialTypesCnt();
AssertSingularEquals(expected, actual);
void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y)
{
var options = new JsonSerializerOptions
{
WriteIndented = false
};
Assert.That(y.Cnt, Is.EqualTo(x.Cnt));
Assert.That(y.CJson.HasValue, Is.EqualTo(x.CJson.HasValue));
if (y.CJson.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJson.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJson.Value, options)));
Assert.That(y.CJsonb.HasValue, Is.EqualTo(x.CJsonb.HasValue));
if (y.CJsonb.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJsonb.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJsonb.Value, options)));
}
}

private static IEnumerable<TestCaseData> PostgresNetworkCopyFromTestCases
{
get
Expand Down
34 changes: 34 additions & 0 deletions end2end/EndToEndTests/NpgsqlTester.generated.cs
Original file line number Diff line number Diff line change
Expand Up @@ -892,6 +892,40 @@ void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.Get
}
}

[Test]
[TestCase(100, "{\"song\": \"Pinball Wizard\", \"album\": \"Tommy\", \"artist\": \"The Who\"}")]
[TestCase(10, null)]
public async Task TestPostgresJsonCopyFrom(int batchSize, string cJson)
{
JsonElement? cParsedJson = null;
if (cJson != null)
cParsedJson = JsonDocument.Parse(cJson).RootElement;
var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CJson = cParsedJson, CJsonb = cParsedJson }).ToList();
await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs);
var expected = new QuerySql.GetPostgresSpecialTypesCntRow
{
Cnt = batchSize,
CJson = cParsedJson,
CJsonb = cParsedJson
};
var actual = await QuerySql.GetPostgresSpecialTypesCnt();
AssertSingularEquals(expected, actual.Value);
void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y)
{
var options = new JsonSerializerOptions
{
WriteIndented = false
};
Assert.That(y.Cnt, Is.EqualTo(x.Cnt));
Assert.That(y.CJson.HasValue, Is.EqualTo(x.CJson.HasValue));
if (y.CJson.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJson.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJson.Value, options)));
Assert.That(y.CJsonb.HasValue, Is.EqualTo(x.CJsonb.HasValue));
if (y.CJsonb.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJsonb.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJsonb.Value, options)));
}
}

private static IEnumerable<TestCaseData> PostgresNetworkCopyFromTestCases
{
get
Expand Down
34 changes: 34 additions & 0 deletions end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs
Original file line number Diff line number Diff line change
Expand Up @@ -892,6 +892,40 @@ void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.Get
}
}

[Test]
[TestCase(100, "{\"song\": \"Pinball Wizard\", \"album\": \"Tommy\", \"artist\": \"The Who\"}")]
[TestCase(10, null)]
public async Task TestPostgresJsonCopyFrom(int batchSize, string cJson)
{
JsonElement? cParsedJson = null;
if (cJson != null)
cParsedJson = JsonDocument.Parse(cJson).RootElement;
var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CJson = cParsedJson, CJsonb = cParsedJson }).ToList();
await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs);
var expected = new QuerySql.GetPostgresSpecialTypesCntRow
{
Cnt = batchSize,
CJson = cParsedJson,
CJsonb = cParsedJson
};
var actual = await QuerySql.GetPostgresSpecialTypesCnt();
AssertSingularEquals(expected, actual);
void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y)
{
var options = new JsonSerializerOptions
{
WriteIndented = false
};
Assert.That(y.Cnt, Is.EqualTo(x.Cnt));
Assert.That(y.CJson.HasValue, Is.EqualTo(x.CJson.HasValue));
if (y.CJson.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJson.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJson.Value, options)));
Assert.That(y.CJsonb.HasValue, Is.EqualTo(x.CJsonb.HasValue));
if (y.CJsonb.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJsonb.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJsonb.Value, options)));
}
}

private static IEnumerable<TestCaseData> PostgresNetworkCopyFromTestCases
{
get
Expand Down
34 changes: 34 additions & 0 deletions end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs
Original file line number Diff line number Diff line change
Expand Up @@ -892,6 +892,40 @@ void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.Get
}
}

[Test]
[TestCase(100, "{\"song\": \"Pinball Wizard\", \"album\": \"Tommy\", \"artist\": \"The Who\"}")]
[TestCase(10, null)]
public async Task TestPostgresJsonCopyFrom(int batchSize, string cJson)
{
JsonElement? cParsedJson = null;
if (cJson != null)
cParsedJson = JsonDocument.Parse(cJson).RootElement;
var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CJson = cParsedJson, CJsonb = cParsedJson }).ToList();
await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs);
var expected = new QuerySql.GetPostgresSpecialTypesCntRow
{
Cnt = batchSize,
CJson = cParsedJson,
CJsonb = cParsedJson
};
var actual = await QuerySql.GetPostgresSpecialTypesCnt();
AssertSingularEquals(expected, actual);
void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y)
{
var options = new JsonSerializerOptions
{
WriteIndented = false
};
Assert.That(y.Cnt, Is.EqualTo(x.Cnt));
Assert.That(y.CJson.HasValue, Is.EqualTo(x.CJson.HasValue));
if (y.CJson.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJson.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJson.Value, options)));
Assert.That(y.CJsonb.HasValue, Is.EqualTo(x.CJsonb.HasValue));
if (y.CJsonb.HasValue)
Assert.That(JsonSerializer.Serialize(y.CJsonb.Value, options), Is.EqualTo(JsonSerializer.Serialize(x.CJsonb.Value, options)));
}
}

private static IEnumerable<TestCaseData> PostgresNetworkCopyFromTestCases
{
get
Expand Down
Loading
Loading